diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..e9bfa7193f2a0092b0194c7da610900fc7866e75 --- /dev/null +++ b/.gitignore @@ -0,0 +1,133 @@ +# Custom +.DS_Store + + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/README.md b/README.md index a651f01af95373d3e90b88eb2f604962b9ce2cab..08c35ceffbef4f465295420a13060a7a42819f59 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ --- title: Fifa Tryon Demo -emoji: 🔥 +emoji: 🧥👚👕👔 colorFrom: yellow colorTo: red sdk: gradio diff --git a/Self-Correction-Human-Parsing-for-ACGPN/datasets/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/datasets/datasets.py b/Self-Correction-Human-Parsing-for-ACGPN/datasets/datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..433f15af93029538b3b039f8f207764fcfe426d9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/datasets/datasets.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : datasets.py +@Time : 8/4/19 3:35 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
+""" + +import os +import numpy as np +import random +import torch +import cv2 +from torch.utils import data +from utils.transforms import get_affine_transform + + +class LIPDataSet(data.Dataset): + def __init__(self, root, dataset, crop_size=[473, 473], scale_factor=0.25, + rotation_factor=30, ignore_label=255, transform=None): + self.root = root + self.aspect_ratio = crop_size[1] * 1.0 / crop_size[0] + self.crop_size = np.asarray(crop_size) + self.ignore_label = ignore_label + self.scale_factor = scale_factor + self.rotation_factor = rotation_factor + self.flip_prob = 0.5 + self.transform = transform + self.dataset = dataset + + list_path = os.path.join(self.root, self.dataset + '_id.txt') + train_list = [i_id.strip() for i_id in open(list_path)] + + self.train_list = train_list + self.number_samples = len(self.train_list) + + def __len__(self): + return self.number_samples + + def _box2cs(self, box): + x, y, w, h = box[:4] + return self._xywh2cs(x, y, w, h) + + def _xywh2cs(self, x, y, w, h): + center = np.zeros((2), dtype=np.float32) + center[0] = x + w * 0.5 + center[1] = y + h * 0.5 + if w > self.aspect_ratio * h: + h = w * 1.0 / self.aspect_ratio + elif w < self.aspect_ratio * h: + w = h * self.aspect_ratio + scale = np.array([w * 1.0, h * 1.0], dtype=np.float32) + return center, scale + + def __getitem__(self, index): + train_item = self.train_list[index] + + im_path = os.path.join(self.root, self.dataset + '_images', train_item + '.jpg') + parsing_anno_path = os.path.join(self.root, self.dataset + '_segmentations', train_item + '.png') + + im = cv2.imread(im_path, cv2.IMREAD_COLOR) + h, w, _ = im.shape + parsing_anno = np.zeros((h, w), dtype=np.long) + + # Get person center and scale + person_center, s = self._box2cs([0, 0, w - 1, h - 1]) + r = 0 + + if self.dataset != 'test': + # Get pose annotation + parsing_anno = cv2.imread(parsing_anno_path, cv2.IMREAD_GRAYSCALE) + if self.dataset == 'train' or self.dataset == 'trainval': + sf = self.scale_factor + rf = self.rotation_factor + s = s * np.clip(np.random.randn() * sf + 1, 1 - sf, 1 + sf) + r = np.clip(np.random.randn() * rf, -rf * 2, rf * 2) if random.random() <= 0.6 else 0 + + if random.random() <= self.flip_prob: + im = im[:, ::-1, :] + parsing_anno = parsing_anno[:, ::-1] + person_center[0] = im.shape[1] - person_center[0] - 1 + right_idx = [15, 17, 19] + left_idx = [14, 16, 18] + for i in range(0, 3): + right_pos = np.where(parsing_anno == right_idx[i]) + left_pos = np.where(parsing_anno == left_idx[i]) + parsing_anno[right_pos[0], right_pos[1]] = left_idx[i] + parsing_anno[left_pos[0], left_pos[1]] = right_idx[i] + + trans = get_affine_transform(person_center, s, r, self.crop_size) + input = cv2.warpAffine( + im, + trans, + (int(self.crop_size[1]), int(self.crop_size[0])), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0, 0, 0)) + + if self.transform: + input = self.transform(input) + + meta = { + 'name': train_item, + 'center': person_center, + 'height': h, + 'width': w, + 'scale': s, + 'rotation': r + } + + if self.dataset == 'val' or self.dataset == 'test': + return input, meta + else: + label_parsing = cv2.warpAffine( + parsing_anno, + trans, + (int(self.crop_size[1]), int(self.crop_size[0])), + flags=cv2.INTER_NEAREST, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(255)) + + label_parsing = torch.from_numpy(label_parsing) + + return input, label_parsing, meta + + +class LIPDataValSet(data.Dataset): + def __init__(self, root, dataset='val', crop_size=[473, 473], transform=None, flip=False): 
+ self.root = root + self.crop_size = crop_size + self.transform = transform + self.flip = flip + self.dataset = dataset + self.root = root + self.aspect_ratio = crop_size[1] * 1.0 / crop_size[0] + self.crop_size = np.asarray(crop_size) + + list_path = os.path.join(self.root, self.dataset + '_id.txt') + val_list = [i_id.strip() for i_id in open(list_path)] + + self.val_list = val_list + self.number_samples = len(self.val_list) + + def __len__(self): + return len(self.val_list) + + def _box2cs(self, box): + x, y, w, h = box[:4] + return self._xywh2cs(x, y, w, h) + + def _xywh2cs(self, x, y, w, h): + center = np.zeros((2), dtype=np.float32) + center[0] = x + w * 0.5 + center[1] = y + h * 0.5 + if w > self.aspect_ratio * h: + h = w * 1.0 / self.aspect_ratio + elif w < self.aspect_ratio * h: + w = h * self.aspect_ratio + scale = np.array([w * 1.0, h * 1.0], dtype=np.float32) + + return center, scale + + def __getitem__(self, index): + val_item = self.val_list[index] + # Load training image + im_path = os.path.join(self.root, self.dataset + '_images', val_item + '.jpg') + im = cv2.imread(im_path, cv2.IMREAD_COLOR) + h, w, _ = im.shape + # Get person center and scale + person_center, s = self._box2cs([0, 0, w - 1, h - 1]) + r = 0 + trans = get_affine_transform(person_center, s, r, self.crop_size) + input = cv2.warpAffine( + im, + trans, + (int(self.crop_size[1]), int(self.crop_size[0])), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0, 0, 0)) + input = self.transform(input) + flip_input = input.flip(dims=[-1]) + if self.flip: + batch_input_im = torch.stack([input, flip_input]) + else: + batch_input_im = input + + meta = { + 'name': val_item, + 'center': person_center, + 'height': h, + 'width': w, + 'scale': s, + 'rotation': r + } + + return batch_input_im, meta diff --git a/Self-Correction-Human-Parsing-for-ACGPN/datasets/simple_extractor_dataset.py b/Self-Correction-Human-Parsing-for-ACGPN/datasets/simple_extractor_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..2cd000e26e375f6d02ef62cfd92c9b17916357a9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/datasets/simple_extractor_dataset.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : dataset.py +@Time : 8/30/19 9:12 PM +@Desc : Dataset Definition +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
+""" + +import os +import cv2 +import numpy as np + +from torch.utils import data +from utils.transforms import get_affine_transform + + +class SimpleFolderDataset(data.Dataset): + def __init__(self, root, input_size=[512, 512], transform=None): + self.root = root + self.input_size = input_size + self.transform = transform + self.aspect_ratio = input_size[1] * 1.0 / input_size[0] + self.input_size = np.asarray(input_size) + self.file_list = [] + for file in os.listdir(self.root): + if file.endswith('.jpg') or file.endswith('.png'): + self.file_list.append(file) + + def __len__(self): + return len(self.file_list) + + def _box2cs(self, box): + x, y, w, h = box[:4] + return self._xywh2cs(x, y, w, h) + + def _xywh2cs(self, x, y, w, h): + center = np.zeros((2), dtype=np.float32) + center[0] = x + w * 0.5 + center[1] = y + h * 0.5 + if w > self.aspect_ratio * h: + h = w * 1.0 / self.aspect_ratio + elif w < self.aspect_ratio * h: + w = h * self.aspect_ratio + scale = np.array([w, h], dtype=np.float32) + return center, scale + + def __getitem__(self, index): + img_name = self.file_list[index] + img_path = os.path.join(self.root, img_name) + img = cv2.imread(img_path, cv2.IMREAD_COLOR) + h, w, _ = img.shape + + # Get person center and scale + person_center, s = self._box2cs([0, 0, w - 1, h - 1]) + r = 0 + trans = get_affine_transform(person_center, s, r, self.input_size) + input = cv2.warpAffine( + img, + trans, + (int(self.input_size[1]), int(self.input_size[0])), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0, 0, 0)) + + input = self.transform(input) + meta = { + 'name': img_name, + 'center': person_center, + 'height': h, + 'width': w, + 'scale': s, + 'rotation': r + } + + return input, meta diff --git a/Self-Correction-Human-Parsing-for-ACGPN/datasets/target_generation.py b/Self-Correction-Human-Parsing-for-ACGPN/datasets/target_generation.py new file mode 100644 index 0000000000000000000000000000000000000000..8524db4427755c12ce71a4292d87ebb3e91762c1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/datasets/target_generation.py @@ -0,0 +1,40 @@ +import torch +from torch.nn import functional as F + + +def generate_edge_tensor(label, edge_width=3): + label = label.type(torch.cuda.FloatTensor) + if len(label.shape) == 2: + label = label.unsqueeze(0) + n, h, w = label.shape + edge = torch.zeros(label.shape, dtype=torch.float).cuda() + # right + edge_right = edge[:, 1:h, :] + edge_right[(label[:, 1:h, :] != label[:, :h - 1, :]) & (label[:, 1:h, :] != 255) + & (label[:, :h - 1, :] != 255)] = 1 + + # up + edge_up = edge[:, :, :w - 1] + edge_up[(label[:, :, :w - 1] != label[:, :, 1:w]) + & (label[:, :, :w - 1] != 255) + & (label[:, :, 1:w] != 255)] = 1 + + # upright + edge_upright = edge[:, :h - 1, :w - 1] + edge_upright[(label[:, :h - 1, :w - 1] != label[:, 1:h, 1:w]) + & (label[:, :h - 1, :w - 1] != 255) + & (label[:, 1:h, 1:w] != 255)] = 1 + + # bottomright + edge_bottomright = edge[:, :h - 1, 1:w] + edge_bottomright[(label[:, :h - 1, 1:w] != label[:, 1:h, :w - 1]) + & (label[:, :h - 1, 1:w] != 255) + & (label[:, 1:h, :w - 1] != 255)] = 1 + + kernel = torch.ones((1, 1, edge_width, edge_width), dtype=torch.float).cuda() + with torch.no_grad(): + edge = edge.unsqueeze(1) + edge = F.conv2d(edge, kernel, stride=1, padding=1) + edge[edge!=0] = 1 + edge = edge.squeeze() + return edge diff --git a/Self-Correction-Human-Parsing-for-ACGPN/evaluate.py b/Self-Correction-Human-Parsing-for-ACGPN/evaluate.py new file mode 100644 index 
0000000000000000000000000000000000000000..c1dd9088e5dae7783e00ac153d7b201ff437e6fb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/evaluate.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : evaluate.py +@Time : 8/4/19 3:36 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import os +import argparse +import numpy as np +import torch + +from torch.utils import data +from tqdm import tqdm +from PIL import Image as PILImage +import torchvision.transforms as transforms +import torch.backends.cudnn as cudnn + +import networks +from datasets.datasets import LIPDataValSet +from utils.miou import compute_mean_ioU +from utils.transforms import BGR2RGB_transform +from utils.transforms import transform_parsing + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + + Returns: + A list of parsed arguments. + """ + parser = argparse.ArgumentParser(description="Self Correction for Human Parsing") + + # Network Structure + parser.add_argument("--arch", type=str, default='resnet101') + # Data Preference + parser.add_argument("--data-dir", type=str, default='./data/LIP') + parser.add_argument("--batch-size", type=int, default=1) + parser.add_argument("--input-size", type=str, default='473,473') + parser.add_argument("--num-classes", type=int, default=20) + parser.add_argument("--ignore-label", type=int, default=255) + parser.add_argument("--random-mirror", action="store_true") + parser.add_argument("--random-scale", action="store_true") + # Evaluation Preference + parser.add_argument("--log-dir", type=str, default='./log') + parser.add_argument("--model-restore", type=str, default='./log/checkpoint.pth.tar') + parser.add_argument("--gpu", type=str, default='0', help="choose gpu device.") + parser.add_argument("--save-results", action="store_true", help="whether to save the results.") + parser.add_argument("--flip", action="store_true", help="random flip during the test.") + parser.add_argument("--multi-scales", type=str, default='1', help="multiple scales during the test") + return parser.parse_args() + + +def get_palette(num_cls): + """ Returns the color map for visualizing the segmentation mask. 
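+    Each class index is encoded bit by bit into the R, G and B channels (three bits,
+    one per channel, per iteration of the loop below, starting at the most significant
+    bit), so consecutive class ids map to visually distinct colors.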
+ Args: + num_cls: Number of classes + Returns: + The color map + """ + n = num_cls + palette = [0] * (n * 3) + for j in range(0, n): + lab = j + palette[j * 3 + 0] = 0 + palette[j * 3 + 1] = 0 + palette[j * 3 + 2] = 0 + i = 0 + while lab: + palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) + palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) + palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) + i += 1 + lab >>= 3 + return palette + + +def multi_scale_testing(model, batch_input_im, crop_size=[473, 473], flip=True, multi_scales=[1]): + flipped_idx = (15, 14, 17, 16, 19, 18) + if len(batch_input_im.shape) > 4: + batch_input_im = batch_input_im.squeeze() + if len(batch_input_im.shape) == 3: + batch_input_im = batch_input_im.unsqueeze(0) + + interp = torch.nn.Upsample(size=crop_size, mode='bilinear', align_corners=True) + ms_outputs = [] + for s in multi_scales: + interp_im = torch.nn.Upsample(scale_factor=s, mode='bilinear', align_corners=True) + scaled_im = interp_im(batch_input_im) + parsing_output = model(scaled_im) + parsing_output = parsing_output[0][-1] + output = parsing_output[0] + if flip: + flipped_output = parsing_output[1] + flipped_output[14:20, :, :] = flipped_output[flipped_idx, :, :] + output += flipped_output.flip(dims=[-1]) + output *= 0.5 + output = interp(output.unsqueeze(0)) + ms_outputs.append(output[0]) + ms_fused_parsing_output = torch.stack(ms_outputs) + ms_fused_parsing_output = ms_fused_parsing_output.mean(0) + ms_fused_parsing_output = ms_fused_parsing_output.permute(1, 2, 0) # HWC + parsing = torch.argmax(ms_fused_parsing_output, dim=2) + parsing = parsing.data.cpu().numpy() + ms_fused_parsing_output = ms_fused_parsing_output.data.cpu().numpy() + return parsing, ms_fused_parsing_output + + +def main(): + """Create the model and start the evaluation process.""" + args = get_arguments() + multi_scales = [float(i) for i in args.multi_scales.split(',')] + gpus = [int(i) for i in args.gpu.split(',')] + assert len(gpus) == 1 + if not args.gpu == 'None': + os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu + + cudnn.benchmark = True + cudnn.enabled = True + + h, w = map(int, args.input_size.split(',')) + input_size = [h, w] + + model = networks.init_model(args.arch, num_classes=args.num_classes, pretrained=None) + + IMAGE_MEAN = model.mean + IMAGE_STD = model.std + INPUT_SPACE = model.input_space + print('image mean: {}'.format(IMAGE_MEAN)) + print('image std: {}'.format(IMAGE_STD)) + print('input space:{}'.format(INPUT_SPACE)) + if INPUT_SPACE == 'BGR': + print('BGR Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + + ]) + if INPUT_SPACE == 'RGB': + print('RGB Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + BGR2RGB_transform(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + ]) + + # Data loader + lip_test_dataset = LIPDataValSet(args.data_dir, 'val', crop_size=input_size, transform=transform, flip=args.flip) + num_samples = len(lip_test_dataset) + print('Totoal testing sample numbers: {}'.format(num_samples)) + testloader = data.DataLoader(lip_test_dataset, batch_size=args.batch_size, shuffle=False, pin_memory=True) + + # Load model weight + state_dict = torch.load(args.model_restore)['state_dict'] + from collections import OrderedDict + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + name = k[7:] # remove `module.` + new_state_dict[name] = v + model.load_state_dict(new_state_dict) + model.cuda() + model.eval() + + 
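+    # The loop below runs multi_scale_testing on every val image (optionally together with
+    # a horizontally flipped copy), keeps each image's center/scale so compute_mean_ioU can
+    # relate crop-space predictions back to the original resolution, and, when
+    # --save-results is set, also warps each map back via transform_parsing and writes it
+    # as a palette-indexed PNG into sp_results_dir.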
sp_results_dir = os.path.join(args.log_dir, 'sp_results') + if not os.path.exists(sp_results_dir): + os.makedirs(sp_results_dir) + + palette = get_palette(20) + parsing_preds = [] + scales = np.zeros((num_samples, 2), dtype=np.float32) + centers = np.zeros((num_samples, 2), dtype=np.int32) + with torch.no_grad(): + for idx, batch in enumerate(tqdm(testloader)): + image, meta = batch + if (len(image.shape) > 4): + image = image.squeeze() + im_name = meta['name'][0] + c = meta['center'].numpy()[0] + s = meta['scale'].numpy()[0] + w = meta['width'].numpy()[0] + h = meta['height'].numpy()[0] + scales[idx, :] = s + centers[idx, :] = c + parsing, logits = multi_scale_testing(model, image.cuda(), crop_size=input_size, flip=args.flip, + multi_scales=multi_scales) + if args.save_results: + parsing_result = transform_parsing(parsing, c, s, w, h, input_size) + parsing_result_path = os.path.join(sp_results_dir, im_name + '.png') + output_im = PILImage.fromarray(np.asarray(parsing_result, dtype=np.uint8)) + output_im.putpalette(palette) + output_im.save(parsing_result_path) + + parsing_preds.append(parsing) + assert len(parsing_preds) == num_samples + mIoU = compute_mean_ioU(parsing_preds, scales, centers, args.num_classes, args.data_dir, input_size) + print(mIoU) + return + + +if __name__ == '__main__': + main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7c771109c1a943b0610978b7c01b024eabf9e08a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/README.md @@ -0,0 +1,38 @@ +# Self Correction for Human Parsing + +We propose a simple yet effective multiple human parsing framework by extending our self-correction network. + +Here we show an example usage jupyter notebook in [demo.ipynb](./demo.ipynb). + +## Requirements + +Please see [INSTALL.md](https://github.com/facebookresearch/detectron2/blob/master/INSTALL.md) for further requirements. + +## Citation + +Please cite our work if you find this repo useful in your research. + +```latex +@article{li2019self, + title={Self-Correction for Human Parsing}, + author={Li, Peike and Xu, Yunqiu and Wei, Yunchao and Yang, Yi}, + journal={arXiv preprint arXiv:1910.09777}, + year={2019} +} +``` + +## Visualization + +* Source Image. +![demo](./demo/demo.jpg) +* Instance Human Mask. +![demo-lip](./demo/demo_instance_human_mask.png) +* Global Human Parsing Result. +![demo-lip](./demo/demo_global_human_parsing.png) +* Multiple Human Parsing Result. +![demo-lip](./demo/demo_multiple_human_parsing.png) + +## Related + +Our implementation is based on the [Detectron2](https://github.com/facebookresearch/detectron2). 
+ diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/human_to_coco.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/human_to_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8eccb3a8f63e9b76eade5b2036526d91b8483dc2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/human_to_coco.py @@ -0,0 +1,166 @@ +import argparse +import datetime +import json +import os +from PIL import Image +import numpy as np + +import pycococreatortools + + +def get_arguments(): + parser = argparse.ArgumentParser(description="transform mask annotation to coco annotation") + parser.add_argument("--dataset", type=str, default='CIHP', help="name of dataset (CIHP, MHPv2 or VIP)") + parser.add_argument("--json_save_dir", type=str, default='../data/msrcnn_finetune_annotations', + help="path to save coco-style annotation json file") + parser.add_argument("--use_val", type=bool, default=False, + help="use train+val set for finetuning or not") + parser.add_argument("--train_img_dir", type=str, default='../data/instance-level_human_parsing/Training/Images', + help="train image path") + parser.add_argument("--train_anno_dir", type=str, + default='../data/instance-level_human_parsing/Training/Human_ids', + help="train human mask path") + parser.add_argument("--val_img_dir", type=str, default='../data/instance-level_human_parsing/Validation/Images', + help="val image path") + parser.add_argument("--val_anno_dir", type=str, + default='../data/instance-level_human_parsing/Validation/Human_ids', + help="val human mask path") + return parser.parse_args() + + +def main(args): + INFO = { + "description": args.split_name + " Dataset", + "url": "", + "version": "", + "year": 2019, + "contributor": "xyq", + "date_created": datetime.datetime.utcnow().isoformat(' ') + } + + LICENSES = [ + { + "id": 1, + "name": "", + "url": "" + } + ] + + CATEGORIES = [ + { + 'id': 1, + 'name': 'person', + 'supercategory': 'person', + }, + ] + + coco_output = { + "info": INFO, + "licenses": LICENSES, + "categories": CATEGORIES, + "images": [], + "annotations": [] + } + + image_id = 1 + segmentation_id = 1 + + for image_name in os.listdir(args.train_img_dir): + image = Image.open(os.path.join(args.train_img_dir, image_name)) + image_info = pycococreatortools.create_image_info( + image_id, image_name, image.size + ) + coco_output["images"].append(image_info) + + human_mask_name = os.path.splitext(image_name)[0] + '.png' + human_mask = np.asarray(Image.open(os.path.join(args.train_anno_dir, human_mask_name))) + human_gt_labels = np.unique(human_mask) + + for i in range(1, len(human_gt_labels)): + category_info = {'id': 1, 'is_crowd': 0} + binary_mask = np.uint8(human_mask == i) + annotation_info = pycococreatortools.create_annotation_info( + segmentation_id, image_id, category_info, binary_mask, + image.size, tolerance=10 + ) + if annotation_info is not None: + coco_output["annotations"].append(annotation_info) + + segmentation_id += 1 + image_id += 1 + + if not os.path.exists(args.json_save_dir): + os.makedirs(args.json_save_dir) + if not args.use_val: + with open('{}/{}_train.json'.format(args.json_save_dir, args.split_name), 'w') as output_json_file: + json.dump(coco_output, output_json_file) + else: + for image_name in os.listdir(args.val_img_dir): + image = Image.open(os.path.join(args.val_img_dir, image_name)) + image_info = pycococreatortools.create_image_info( + image_id, image_name, 
image.size + ) + coco_output["images"].append(image_info) + + human_mask_name = os.path.splitext(image_name)[0] + '.png' + human_mask = np.asarray(Image.open(os.path.join(args.val_anno_dir, human_mask_name))) + human_gt_labels = np.unique(human_mask) + + for i in range(1, len(human_gt_labels)): + category_info = {'id': 1, 'is_crowd': 0} + binary_mask = np.uint8(human_mask == i) + annotation_info = pycococreatortools.create_annotation_info( + segmentation_id, image_id, category_info, binary_mask, + image.size, tolerance=10 + ) + if annotation_info is not None: + coco_output["annotations"].append(annotation_info) + + segmentation_id += 1 + image_id += 1 + + with open('{}/{}_trainval.json'.format(args.json_save_dir, args.split_name), 'w') as output_json_file: + json.dump(coco_output, output_json_file) + + coco_output_val = { + "info": INFO, + "licenses": LICENSES, + "categories": CATEGORIES, + "images": [], + "annotations": [] + } + + image_id_val = 1 + segmentation_id_val = 1 + + for image_name in os.listdir(args.val_img_dir): + image = Image.open(os.path.join(args.val_img_dir, image_name)) + image_info = pycococreatortools.create_image_info( + image_id_val, image_name, image.size + ) + coco_output_val["images"].append(image_info) + + human_mask_name = os.path.splitext(image_name)[0] + '.png' + human_mask = np.asarray(Image.open(os.path.join(args.val_anno_dir, human_mask_name))) + human_gt_labels = np.unique(human_mask) + + for i in range(1, len(human_gt_labels)): + category_info = {'id': 1, 'is_crowd': 0} + binary_mask = np.uint8(human_mask == i) + annotation_info = pycococreatortools.create_annotation_info( + segmentation_id_val, image_id_val, category_info, binary_mask, + image.size, tolerance=10 + ) + if annotation_info is not None: + coco_output_val["annotations"].append(annotation_info) + + segmentation_id_val += 1 + image_id_val += 1 + + with open('{}/{}_val.json'.format(args.json_save_dir, args.split_name), 'w') as output_json_file_val: + json.dump(coco_output_val, output_json_file_val) + + +if __name__ == "__main__": + args = get_arguments() + main(args) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/pycococreatortools.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/pycococreatortools.py new file mode 100644 index 0000000000000000000000000000000000000000..3f3d8332ceda5fa4409095a0ec56d181ea162273 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/pycococreatortools.py @@ -0,0 +1,114 @@ +import re +import datetime +import numpy as np +from itertools import groupby +from skimage import measure +from PIL import Image +from pycocotools import mask + +convert = lambda text: int(text) if text.isdigit() else text.lower() +natrual_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)] + + +def resize_binary_mask(array, new_size): + image = Image.fromarray(array.astype(np.uint8) * 255) + image = image.resize(new_size) + return np.asarray(image).astype(np.bool_) + + +def close_contour(contour): + if not np.array_equal(contour[0], contour[-1]): + contour = np.vstack((contour, contour[0])) + return contour + + +def binary_mask_to_rle(binary_mask): + rle = {'counts': [], 'size': list(binary_mask.shape)} + counts = rle.get('counts') + for i, (value, elements) in enumerate(groupby(binary_mask.ravel(order='F'))): + if i == 0 and value == 1: + counts.append(0) + counts.append(len(list(elements))) + + return rle + + +def 
binary_mask_to_polygon(binary_mask, tolerance=0): + """Converts a binary mask to COCO polygon representation + Args: + binary_mask: a 2D binary numpy array where '1's represent the object + tolerance: Maximum distance from original points of polygon to approximated + polygonal chain. If tolerance is 0, the original coordinate array is returned. + """ + polygons = [] + # pad mask to close contours of shapes which start and end at an edge + padded_binary_mask = np.pad(binary_mask, pad_width=1, mode='constant', constant_values=0) + contours = measure.find_contours(padded_binary_mask, 0.5) + contours = np.subtract(contours, 1) + for contour in contours: + contour = close_contour(contour) + contour = measure.approximate_polygon(contour, tolerance) + if len(contour) < 3: + continue + contour = np.flip(contour, axis=1) + segmentation = contour.ravel().tolist() + # after padding and subtracting 1 we may get -0.5 points in our segmentation + segmentation = [0 if i < 0 else i for i in segmentation] + polygons.append(segmentation) + + return polygons + + +def create_image_info(image_id, file_name, image_size, + date_captured=datetime.datetime.utcnow().isoformat(' '), + license_id=1, coco_url="", flickr_url=""): + image_info = { + "id": image_id, + "file_name": file_name, + "width": image_size[0], + "height": image_size[1], + "date_captured": date_captured, + "license": license_id, + "coco_url": coco_url, + "flickr_url": flickr_url + } + + return image_info + + +def create_annotation_info(annotation_id, image_id, category_info, binary_mask, + image_size=None, tolerance=2, bounding_box=None): + if image_size is not None: + binary_mask = resize_binary_mask(binary_mask, image_size) + + binary_mask_encoded = mask.encode(np.asfortranarray(binary_mask.astype(np.uint8))) + + area = mask.area(binary_mask_encoded) + if area < 1: + return None + + if bounding_box is None: + bounding_box = mask.toBbox(binary_mask_encoded) + + if category_info["is_crowd"]: + is_crowd = 1 + segmentation = binary_mask_to_rle(binary_mask) + else: + is_crowd = 0 + segmentation = binary_mask_to_polygon(binary_mask, tolerance) + if not segmentation: + return None + + annotation_info = { + "id": annotation_id, + "image_id": image_id, + "category_id": category_info["id"], + "iscrowd": is_crowd, + "area": area.tolist(), + "bbox": bounding_box.tolist(), + "segmentation": segmentation, + "width": binary_mask.shape[1], + "height": binary_mask.shape[0], + } + + return annotation_info diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/test_human2coco_format.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/test_human2coco_format.py new file mode 100644 index 0000000000000000000000000000000000000000..17339187305a97fa7ab198cf1d8127a76ebdf854 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/coco_style_annotation_creator/test_human2coco_format.py @@ -0,0 +1,74 @@ +import argparse +import datetime +import json +import os +from PIL import Image + +import pycococreatortools + + +def get_arguments(): + parser = argparse.ArgumentParser(description="transform mask annotation to coco annotation") + parser.add_argument("--dataset", type=str, default='CIHP', help="name of dataset (CIHP, MHPv2 or VIP)") + parser.add_argument("--json_save_dir", type=str, default='../data/CIHP/annotations', + help="path to save coco-style annotation json file") + parser.add_argument("--test_img_dir", type=str, default='../data/CIHP/Testing/Images', + help="test image 
path") + return parser.parse_args() + +args = get_arguments() + +INFO = { + "description": args.dataset + "Dataset", + "url": "", + "version": "", + "year": 2020, + "contributor": "yunqiuxu", + "date_created": datetime.datetime.utcnow().isoformat(' ') +} + +LICENSES = [ + { + "id": 1, + "name": "", + "url": "" + } +] + +CATEGORIES = [ + { + 'id': 1, + 'name': 'person', + 'supercategory': 'person', + }, +] + + +def main(args): + coco_output = { + "info": INFO, + "licenses": LICENSES, + "categories": CATEGORIES, + "images": [], + "annotations": [] + } + + image_id = 1 + + for image_name in os.listdir(args.test_img_dir): + image = Image.open(os.path.join(args.test_img_dir, image_name)) + image_info = pycococreatortools.create_image_info( + image_id, image_name, image.size + ) + coco_output["images"].append(image_info) + image_id += 1 + + if not os.path.exists(os.path.join(args.json_save_dir)): + os.mkdir(os.path.join(args.json_save_dir)) + + with open('{}/{}.json'.format(args.json_save_dir, args.dataset), 'w') as output_json_file: + json.dump(coco_output, output_json_file) + + +if __name__ == "__main__": + main(args) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/data/DemoDataset/global_pic/demo.jpg b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/data/DemoDataset/global_pic/demo.jpg new file mode 100644 index 0000000000000000000000000000000000000000..870817943ddd2e0c23c26d4620ff51ea2c9d5ebd Binary files /dev/null and b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/data/DemoDataset/global_pic/demo.jpg differ diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo.ipynb b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..9ceaa358b93868b3c6a842776551578688646c53 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo.ipynb @@ -0,0 +1,306 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "source": [ + "### STEP1: Generate COCO Style Annotation\n", + "\n", + "Here we show a basic usage example using DemoDataset in `data/DemoDataset/`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python ./coco_style_annotation_creator/test_human2coco_format.py \\\n", + "--dataset 'Demo' \\\n", + "--json_save_dir './data/DemoDataset/msrcnn_finetune_annotations' \\\n", + "--test_img_dir './data/DemoDataset/global_pic'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### STEP2: Generater Instance Prediciton\n", + "Here we provide a finetuned cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv model on CIHP dataset with human instance mask. 
Download the pretrained weight in `pretrain_model/`.\n", + "\n", + "- [detectron2_maskrcnn_cihp_finetune.pth](https://drive.google.com/file/d/1T797HPC9V1mmw0cDoVOPSF1F_rrTcGPG/view?usp=sharing)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cd ./detectron2/tools/" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python finetune_net.py \\\n", + "--num-gpus 1 \\\n", + "--config-file ../configs/Misc/demo.yaml \\\n", + "--eval-only MODEL.WEIGHTS ../../pretrain_model/detectron2_maskrcnn_cihp_finetune.pth TEST.AUG.ENABLED False DATALOADER.NUM_WORKERS 0" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Crop the original image by prediction bbox" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cd ../../" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python make_crop_and_mask_w_mask_nms.py \\\n", + "--img_dir './data/DemoDataset/global_pic' \\ \n", + "--save_dir './data/DemoDataset' \\\n", + "--img_list './data/DemoDataset/annotations/Demo.json' \\\n", + "--det_res './data/DemoDataset/detectron2_prediction/inference/instances_predictions.pth'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### STEP3: Predict Local and Global Result\n", + "Download the pretrained weight in `pretrain_model/`.\n", + "\n", + "- [exp_schp_multi_cihp_global.pth](https://drive.google.com/file/d/1s30hj8zeYj0wuTA5Rek-one-v5uT7kX9/view?usp=sharing)\n", + "- [exp_schp_multi_cihp_local.pth](https://drive.google.com/file/d/1dwDrXHkhAe_nYtnSqi548zrjo5mlSPF0/view?usp=sharing)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/peike/Projects/Augmented-CE2P\n" + ] + } + ], + "source": [ + "cd ../" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!export PYTHONPATH=./:$PYTHONPATH" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python mhp_extension/global_local_parsing/global_local_evaluate.py \\\n", + "--data-dir mhp_extension/data/DemoDataset \\\n", + "--split-name crop_pic \\\n", + "--model-restore mhp_extension/pretrain_model/exp_schp_multi_cihp_local.pth \\\n", + "--log-dir mhp_extension/data/DemoDataset \\\n", + "--save-results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python mhp_extension/global_local_parsing/global_local_evaluate.py \\\n", + "--data-dir mhp_extension/data/DemoDataset \\\n", + "--split-name global_pic \\\n", + "--model-restore mhp_extension/pretrain_model/exp_schp_multi_cihp_global.pth \\\n", + "--log-dir mhp_extension/data/DemoDataset \\\n", + "--save-results" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### STEP4: Fusion Prediciton" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!python mhp_extension/logits_fusion.py \\\n", + "--test_json_path ./mhp_extension/data/DemoDataset/crop.json \\\n", + "--global_output_dir ./mhp_extension/data/DemoDataset/global_pic_parsing \\\n", + "--gt_output_dir ./mhp_extension/data/DemoDataset/crop_pic_parsing \\\n", + "--mask_output_dir 
./mhp_extension/data/DemoDataset/crop_mask \\\n", + "--save_dir ./mhp_extension/data/DemoDataset/mhp_fusion_parsing \\" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Visualization" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABLAAAAOECAIAAAA+D1+tAAEAAElEQVR4nLz9Wa8ty5EmiH1m5h6x1tr7TPdeMjklkzMrkznUkFXV1aVuNCAJ/VCtlkotAXpQQ79JT3rRiwABepMgaAAEVLcKrSrVlBOZTA7JJJmcyTvxnmHvtSLczUwP5u4Re59zmGRmqgOXh2vHihXh4YO5fWafmdF/9T/+UkpJRNyo1qrq7g7w8XhclsWsHo/H4/FI5LVWVb06zIk5pZRY3B1qxBARdzegWimmYJKU3H3VNcmRiIgEgBrMjJLknKuuIkhsIDVdal3VVnef6QGBmZmIAABwJ4MD0OpmxswpTcxsZsXUCwAQu4gQQVXdncinaSpF3Z2ZATIz4TzP82pUSgEsT8SEdb1VK0SYJAEACGByhjNAAKZ0rLXWWgFwIhExd4cCKKrm6oRoqkLdfaYPpcTurqWCbJJE8ForYCIEQK2omXld1/W8LDk9WNd1XVfVYmZqxd2JyI0ul/X29rwutVZTdTgREWFlBgBysNA0TXHbnLOWCvA0TTlPKaWcc07z8VhPp9PxeMiH+cGDB0+ePDqdDkJE5DnJ1dXVnKSsa10XM6vL6uUDVb1cVnfPeXZ3U8Tdnjx5cjqdVFWtALi9vX327BlxOhwOx8MEQEtV1SlxzjlLutzcmtXT8ZhSWtfVqjLzg+Mbjx49OV1fGwgk0+GUjgfidP34CUh4OuBwwvGEJICAAMi3//gPrZ4//7lP/x/+9/+768N0SFIv59Nhvty+WNeFWFmUsp9O89XDw0/flxfPn1qpxykfUk7E7nRZVabrc+Xp+o3/2f/qf5Pe+qhZuij94R995YPvfef2+duf/+yHb178+Ofv/ujPv/bNf/dvvvKDH7w3nx49fONNsL/zwdsAH+brj33kE1rpsj5lh7urqqoSUXTOupa6llJKrfVwOCzL8vDhQzP7/Efy5z/7aTN1LFrXnFNZ11JKTlNKkzutpYBsOuRpyoA9u72YGRHlnOC+rpdSSkxjkSzgsuq6Vnc6HA6n43VRqKqZARARInJ3M3PCNE3MDMDdL5dLrTXnbH42M1VNKU3TYV3XpZacc5Kprqup55znfDCzda2lFDBNKYuImdVaoRbH5XLJk6hqsUIEwIhoOsxrpcvlEvdPKQEmIimlaEk0xsyszWZ6lB+tepkPKWWcl1siknwsK2CzW075APfL5Qa05omJdV2Zkzx98Vzmw1LLv/2DP1zVHKTqH/voJxJPP/r+j2aeMuXr4xUR3dBNDNM8z0S0rouqsmBd15T4zTfffPDg6jvf+c7t7W3KknN+9gy1VhFxVzN79PjBo+vrtVxub18AOJ9vpjl/4XOfPxymn/70pymlIotWX5bCnK5OD8vq7777/rOnz4lkmg4f//jHnzx58rWvfe3q6upyuX3x4kUsVWYcD/M85w+/9cZnPvOpq9PxBz/43nI+u9bz+WyqRLSuq4iAsqqCqJg+e/b8spQvfuFLAH3wwbPlfHv74vnnPvvr6/rs9//Bl/7n//y/cF0mXERkzAQyj14nIg65LUJEgMVASJqYecwTAEYAwMzuLiI5Z3Vb15WIpmmakpgZuRORiIxhBXNMDHVzJmbmJMystQ20N3Een6VJZhJ3D4lHMc1QWvNg7goysFOTsQSAIEYMZyIGYBzNBQtATnCFGarDHdyeCCFI/DYWrxvBnSDMiZhBAjc4O6IHLBoKAJgBONyNWuuJDPFVf3egP6t90T/fv2z7Zjt8dyVFA2IgiCh+G9/GV7/qQfCXf0h37+ZwwPpf0Vrr/3n/MH7C418CkV9GgwEQtZa7W5yPvXIcmXN7aIgpd5i7u0iKP4lIiIkZ7u5utv916/9xh7iemUEUf1c9j8aMWW1W47Jxxt1DZKnlcdv9t4w23ONZ8W9dVtodoz/j87isNcBhVX13jBeJZ8Wvxn0AiCOUjVjCIdiZWVXvXRl3YyYiApGbtV2AGMwwix3KzHjXbwwe/SYiiBu6xx00RmT/4rpGv7mR17bRcHQXyK0SEVyfP3/+4sUzAAeexpu2nmQWkVrr6H/bPWWej6ra9CvmEClENN73Xle7a2yIOedQO0k457yua594aqFBMjPzel5DHoZAi1kXbYgeDo0xdigzI+SUEogMGnPCVNdapmkCbCh77q7uZjbN6c4kBEy1quacm1hjdrNaq8JFhGXu04wAMEv0fPQAEbUB7fKZuHUm+Vh6W7eE2AnRGgvFDyAiQ59dTCICYTciYWYGk7ubNilIXHeTsO8Czr3P9/OfsRN0+0lC6PvM3SkKgJEcbuZ9ujIRMdhgrxKM2AmiOwfdkZnbUX1DCuwANbHDFAKqS6Gx+sR2d7N+8zuHu1NfqqqFua1W06qqpsXdp2mKdyYSMMMQGmHIHyJiSomZiFzNrDLIoVBzN7i6KwCQSTrEE0NQMLNAzIxBbUTcvdQAX+4+J2VmxJ5lprWu62o1lkNGX1xMTQeo1ULQOblDzapZdVhdC5EzUWh05CAiIUqf+MTH29xy1FprCbHC67oSuSoBdrnclrrUWs2sXCZ3F6KUkhC7G8wAqPvhOB2Ox5QTJwFR1dXdYeoghQKsbm7ETOY1FjyDQNH/zk7uPk1zTJrd6iUmcncRjnVL1NTTopVNxvjFphCTstbqXXFvg+u6rmuF1FrdlZiFY02yiNRSiDgWdvRGjFNoQjlnZnay1io3jfGBNelIJC4KX9dLrUxEMCd2hdJuu2Vm4pSIiKdpmvI8rwsfj8fT6RCyaS0NDyyXknM+nU4AwZOqmsENNy/eBWKkVlW9XG7RpVut1aqKZBFhlgAqwpcQ/znnPKWcQ5DKG48fHo/HRw+uD1OeUn706NH11VFVr+ZDMlK7NcPxdGKWFy9evLi5ABeWiTjnSYizqqrBwUnE3deiqlrXYmZFaFqVmQmW8qSGy835cns2s5S5Xmhd1/nmmYPVwDlJOhjzzfkCTpSn+XA8Xl3P82xgM1urvvv2z+ZMtN4cp8wwUwMs9GYAxRyu5AZGgdV6JBIRJ6KitTqgqIo00zRNn/rUp4jZzmc+PRbgd373S//iG9985533L7dvkz59/GCC8+VyOZ1Oq7qZEYGJiEkSiRARl5tFEHKemHNMsFprzIuYM6WUdV1LKUR0Pi9L0SxgToo1lGY4uVOtQ4pCVZfFiHzIejND31CJaF2rWQGn2OdqtXVdCWfvijgA87ah
[... remainder of the base64-encoded PNG output of the notebook's visualization cell omitted ...]
XSxJiqkqS4npmzNfyAKFYmU8ukAjMzXZujY1nOZi094NZ17IZTtCSqTj531FLCgZNzPhwOKSXhjNSGMOdIUcN9SFoHdKmKFibUHL4Re1YRu+hQsLolbBtwdzAioomibGjMH6NGlbZIS6jrZSGi4/Eoid2hWqO2ahCp3clNQ9NVNSJkBnEiBsjANE9zaMwNqGpkBwhyCJhj52jUCHcnKvGtECUWazm0KKVkdmqQuzSRDWHceinLaou7OKdQgQEq6msN84k61L3mSeYZ733wHhF1Fl+aJIVd8HQ6HY/z6epwnA/H4+Hhw4enq4OIPP/hzx89evCv/9Uf/BPXX//iF/hmPR4eJZmds0WwFTMzzGqxhZTUNdLhEoSodSxRhDwZgU2DWx+VYoWILpeLWzFbCxNcrZZJ0pyn6+sHTGlZFvOqWpflXHV9+uxZhBeez+dWL1BLKVqKns/n5yAimuZ0OBxyFgBpzpE8RkRiKjLz8XhUb2lFoyeHt1BLVbVSilUVAk8SlPeHD6/OSzldPUzGL87LZz/663//H/1Hp49/yoqy5GnKAMzx4lIAHI65GhK13BHEhEa2p3S8fvf99x5eHd745Kf/80eP/p+1fPsbfwoWrVVVvVYARlDVZXF350ndBSDmCd6MeBG/sdaS1lWmKzkcTtePIfPxIx//ex/98G987u/83/7v/9c/+qP/8OavfeTX6/rVb35NocG0B8zdrNQqpNWZKUpEctQtbCCHa8+h5T27d5ctW/2xvvPp6DraHcxMkMg5HAG05/OFeWJKAc7XdW3u+l75I1jTFAZcbhrDQJubSkRwEhqVe3aUmyaoXd1159XcdBpmjtScsvfdMbOQqr681e7ffahfw9C+B4TAuGHao7tY3QYP4ldjoIWiycwiUQi7wd/ek6aa0uTm2qN6RCRsVfVi77/3/ve//8OvfPWbf/xHf/rDH/xsuVQzZlrnmdJEEcWwiteynm/r07e//pMkj59cffjx7//4/LNvfPAz4pITfewTH330xpPPf+l3cL5dbsr8a59Ic/6LP/njt7//vQ9/9CNq+OLv/z4iqwQlgdlLuMVfgS1+JdLmqy5+GQ1ursI7p/do8G/v2PTa3Z1HzUCMb9GNqru30AbY7gQZ3vMBYhBt97TSu/ff/zlgIXXNpl9G4nc9e32Fbu6goZUC3SrXp+7u2E/6e28at5PYcl8ere1VXnG7rsV21GB7hb632zcUvXtiaMC7pDLYrcReruVuWb+eceROS+JP7pDCtlUcLqYtsLCfJIrkkhTssq0aW4gpx8Ae/SftFmGRCmeZc4OvcAEMfVHvmzckzzg5shN7S4Q+2KGNmo6dxfnOu+/uuRfUMQahXAPEzdlM6Am0uv8W3vJIQ0QgEUK5FYhHsEYBOMi8+XlpY6VG79ZS9v4XCk0qcO82JQn7Scg0Gr11aQ/bDgQYzokN+Xe0Oa7nzm5t1DkiJ3byNj9acg9iSkgyMqDSIIu28SVqsBCdYxxw5p4RI57OvSH3sGK70mG+XU/Uqceb1zFmzm4h7gIMeJzpwLWbN+Lza+xl9JrYwl3DnCBhmQ43UBfhskOJcCRqeTk7wO/dQm3RjETKLWC0XURRrNYAIxjB1V0apoWZtv7cZmmv4BL4yUlVqyk7UhIQs2e4UnShOpG417GrMnMUjGzQPBAA0SB2uHsbZiKCqDbaUlcnGkoa7fHdFk9EaRIzgzV8ZGaqDrNU6kLNYhFioUuiFuNKkcPFTZklJa41CidQAEIicpKhVZmDQnJ1H7eCO+2b3ak6zFqGlfDPE5lDHGpuaj7Un5AL1IkB3I6mVzQ6JZPtkjGYNb4FMy/L0qxN5ERkNrbA8K05EUmihMnMIlI5tOq1FDMLX9m6rqfrQ3j/2iOm4i0A7Nqbwril3zSzy5ric2D9uqzL+VLqcjqdmPl8Pk/TxD0lNJn2SutOEPTYUWYhal0Uah52oLeJAiEiCrsHEUGH0zJGsEklNybwPsLBEZGAHmihvU6pzmxm6o5uQ4oIfyIJUoBp/FQIUDetyuxzVtXSc4K1IkhE5E5Wa7VVVUMfJCJVzaJtIGJvknhZOl8uIpKSMGfJG0Pv0RsPSinVVpBVr6VWdXPKl6VwraWamS3lcllu6GKT64mTViMygQOas6VkAH78zs9TCyXAPKXr6+vr69M8z2+ePvreu6T+9C/+4lv/1f/if/rF3/kHH3xwS3xgycW7ich9LZdFJpqoxWD0Iq9DKQlXUHi5zSzc4+4USXdTmuu6mlW3ej7rzXTz5pO3DoeDcE6Zc84iXPVSa3346JG7395enj17UdeaU+S0rMxcaw17xLKEDcLM7HB1DCZzzIoAhy1az7dtz3qQ4brW8Ht51ICqbl5V9XSdAOI0idGjJw9/60u/+5Ev/mZ4AEFYV1/KejjNp2O2kLjd8tVUwgibd5wVj994UwBbz3L14Ld/7+/fPnv64+9/W9ngjTYcnRShjMzcMYIyeiVNUM65Fi/VlOtSyrLW66pwB/kbn/rsf/nP/5cyT1WXy7Jwojwlg05zmqZpuN3MTCgNplB3+jUXdxmUIahDQZE/HGNR7Nca2kZ+J+dBKDfLsoSdO0akZ+EiABFXo6oxHKUu9xSdoSENWUc7ZmaLydAaUSXcmeSqyqQb2GPeR/PvLfT71s7z7GpDjOyimbYNZgcPt2xJ7u7dh7ltkzu1m6gFlIZ+Q8Ed7apfcDBHuQJqULMl7qpqIpLmA5gvtzfPnj37wXd/8sd//Cd/8sdffftn7z97sUT+YADzlOHZLZlBzZVRy0rsT2Z99uLmjUcf/f3f++Jyef/b3/7g+c3z40m+9Y1/97kvfB7LR370o589eeujOGfw9LWv/jta+XCcfvbOe1/8u78HU8gE4h1IMICtQ4dXG6sBbAlXfll3H/AqNPj64x6w2WlUv6LH8hXxgfSqN7vnMIzBkt0k4btQKm4yJl7saC119kuYcNzfXnO3gX8EtE9XuHvejpEYx1hor/xz107cfd97LWnHK/2YL2PCPfDb8NSrmtoAg6c98KPhbiLq3qPxa3e3XfaNu+/SSXkDdXiU1RzwXsjdyTbg0TGT9Yy1vANLIArFyENbM7PQxiLdBnZ+qrg4FKl9WbpocpjU0F0l3IXD+PnAe3c6thMiWiHvu5dZL5uBga8ovChsIGvundBOe8mMrVejixp3ptQdoyHstER5356dly96tcO53tDYNog6H46opYQBeqDpXtKij+u96gpErUQENVNjgxnU0ps4U8/eGFlMxg0b3vfeNupStBfAGFFz3j2B21NlE9Bga57oscL4btam7fMuueO9NRX7QpMS3cE4rGf8qhV3h0uyG9Zfyb726sNB94xZ40wfxZdlTkhva1lYERoNAW7gnR2Hx68IMgo1jYDcNobE/bkgIu2uVXevbgh7h7OZwogb75fHsg2DSPdgtyqn1hmU6BpRK19s7eZExCPhbWcPMbuZFVOoha6CZoip7qydcxFeOyFWVXP1Tu9yNzdLsfZCRQjUFjpN5OtnBgvMws/gKVI8JSGgaDUzblX+wqwPdyVzN2MIk
TALuUaUTuiDtYbXw3LO7t2OQOEvdOsQeVvA/f1LKSKJe9loUzAzJXFBzrkBXAC9nsFQ+PY3YeZlPTOlUkr1ztRScBKRPM9z6uVvwt1HnIPE2NyPZtUU2opxhSsg8lwPNunj9PDm5mZd10gcb6XWtdEgI4HNuq69EheMUGvTCHOao44Cs0QaSSIj4hFlO9TQ7q/gnmOCmPlyc2vNiBjKZVMHA0sIR4SPqlmUVokuIsJhnpjZVINHt2rdm36pmdVyNSvqADi5MDsJxH0rgaAikU9eiCSz1FpVG+ZMCSnnCI2zctuLpYYUFiMjF7i2LNpmYE6ZicQUVDwRccqcSaFSSgURTzLrwdjBzrSUy/l8A9JpmvT5M5TiTtWs1rpoETGralZZMIlIomq+rB88ff5MRL734t3r6+T0/PHj9Jd/+b/9vd/9fVUXzmu1VWstBwAk8AoiT0nAQFesvW2mHlpy05cNwdoNB93a1hGbGcHmec6SjsdjQHQtS11XdqRDS9P/6NGjZVmWpRDRNE2n41XOcySeUXWvRVUjUPZ8vjmfz1HPk4hSSkGLjdmYpjziNjGcS2a1et9cLM5pWda1rhVvvPkWwEX105/6xOe/+JuQhEvRGQSkRHmeDSiAakv0SIACqU3OZhwUoBjOy/rgOEOXT//279588O5Pf/jd+XhdCKt7beR1RB3FSkutRkaIMskttsZVrQfdKcDz8Yg8AQle64sXTz7x6//sv/yffP3rX/3hT77/4PGjy80Lh4KdBcyRGjTN+RCSYZomgN3XCOW9Jxa2P9lZUC91n0+lpYrdmcBHT0bfns/nw+FALZcde88aai1Hl5lZsyitNIprwbeFPBLPhIbU6HkNcBJMWcDSAKH32ObR7MjTs1fC3B2JhXe52oN6BOow08e11HMz7H6+9c+47eiBqMs6oHLkUiamRt0WRi+YFp2QqaXdVy8tb3NKFJxzJ5Axc12Wd95576tf+9q3vvWtb33jOz/8wY9vbpbD/CDJfNGLqeU0M09E2Y3NzI29Zzq0+lxQxMsh2+Xm9vqaCexYfuMTbz5+mF988JNv/fmf/dMPv/XNP/l3N7fr0/d/+vnf+N3HDx/94Ps/AjFU3Raa87IueTr0ufBKaPBLHkMr+hsevAdFuwi6VyClv9bxSkz4quvuwbBNy+2hcXeaJD1HC+38e3voxTu/652vnIaJ6bXvOGbj9ivfCrj9Mq+zb0lv/64Nu5Hz1/QPsbxydCOn58stfPVBhG0lbrJIRIiHr/UOKmtwJT53O3EsrDuouKep2yxZRDtqGZVagOZYQLh7QkaFJoEGUCKTu7sXra0IL4VV+A6RFb2JcX+OzMs7WUTdwhXmyLs/coSh6m5O46FObO/VUTT119ndyt1bedJXiHQgp9wCDs0s9uYdyhq3YuaWzjGoesItYtBbun/JGdR/u7mVGnlt/+hIRbPXY9E9cW1biafcZWTYLt8sUXN4EpFXox18ZWYikS6QqZVLtp5Z1BEVE4ngjbMT3qXoxGaHaK9w17R0B8AO+bPv0may8ZeMKWPPItoWToNLr1rN3WkRvx1Byy0i9xU/eL20Mg90a/2qkDz7Ahe0u6f4HVj4WvvU3YPddd85DpgVM4DDg0V9uxRCZWGAXL2U4uZEJCxmJsQiKVJphncOnfVN3QIbaX8jJ2xwDO5J4A0cRQXLUJmi5kLPR0eglBKCD+WVSFR1WG3DrL1nhqPPTHdP3HlNAW+iF808pdQof4BRYjIClWKU4ajuHp43aCFiYfLIaeteI00MKafJclqfnUUkUdN3YUZMLbqXCAQ1q70KVjolL0t/c/WeNodgDHJVI1prNfPw5pVSXHjVyGwjIEokAGoxq84sTIArOdgdCrWicEJFJvGkrgA4M8FTgntZliVS/E/TxMxuJmmpZTGlw+HEzHU1M2OWnA8A3EkrQvV3R5KJpDw4CU5Hka2AGCK8x6JMYrlcLoFXmXktGljxcvvCGnuqOTEi7c00TW3oU5rnudaaIxU+JdPglQmJMAWL3gJUhwmEiHlys+KoUTsrYlWZmGFTYjO4m5oSSZ4mALb4SNNsZszGBNeqtebIwK+LWqPzmulyPotIFjKrZlHN3HuBRMn5GD1QSjELny23tPQcdi+yambVQXVZiGjKBwbqugbIuc03of4KhdmCkrv75eGUSrlEF13P5FMGMgBLjwZVMoSmqqqJanNKLJeNpQZhWi8vlInonWc6TfbdH/0HwIoVUz2IlBc3CcJ2QkLig/C0rDd7g0WgwmiGwUVkOuRqhQSXdXmUHz969MiNbm4XLZIlHw6P3njy4DBNKcv58szqxVVL9XVJOUtmrjfPHl5f48Si+fmLF5fb2+Mbb5HQPCeHECVKQsJmdV2vSinr7Q1TcveYUerrui611rzOTmh85pbnzdZ1LVbYwY6kzoYEmn0+pcMH75/nD11dFqLjgw9/9jff+uJvKmW9nlU1i6jaBGagPH/64MEVNExp0HWVaWpSslYKAeLldJwcqDZlOfz2f/Sf/9HXvvfVL//7xw+uLlSmWc4vnk+SiFgk1aWKszkWW63RhElIcj6Uolrk+tEbx9OjS/FrZpjXKRW6NvcnH/v0P3zy4T/92l/cPv/vro9vJbv1hUUzKxtpzsnoUmxlIrgye07wnkEqCefkl8tluWTTh1fHw7taRfJyvrCwu4qkqO54c2NzOsBFazkejrV4LUjp4C5mOB5P1b3UZGalgNmI2MBgVve1VnVP06GY3lwu4Fx0QQZnmea5whetp3RUr2oWJQprrZP0wnyxXkFrqQlsVhNxluTq11fHd999n9wZfr55cTqyu0qCsx0wuzuKW9KuL3pKab2caaNmRYxMGLmIiN2tWE1Ek4gwq9acM5hUlanFSCNqSMwa6dM08mtz5pSZUnAuvSircQ9tBQB7DCjBEjvIDZFU1W9vLg8evsGQ73/vx1/506/92Ve/+a1vfeenP/2plkn1utbD7SV2qROAopWX8+GQzCq8ins5rzLPxPQBf+IwX+rh6uIXyZfL87cPTNBcbspcOS3ntw4123vnH/7Zj3/wM3/n6cOPPZjP5899/AS9Rbqi/KAicc4EJNiABw5RiIMZZRArAN0yKLQYmIbhd1qM3YWUYbxvHJnYfLdrnZ3GE+8cd//c/YRe6eC6T/jc3ce7Sdubp6hnfWv321PLgJCid27bHq6hCcFl7zXzXvcSXQXst02jc7DDV+x38OHWV5sns+vQjbp2twxV83eQujayKJq35OX0M3ePVyt8gnQnqKYpd8MH8tK46K5GZyOOxvsGC2/cZyQv3bys7t7CLzuOBQUJbXuIkKjXWiv34p/NDNSdb03X72CSKdPwOoahh2uojRuYFCYOoOkcwgVBsowVSqgwJglXQJjDhCHiZhNlOFpWRXcQqqvBJSdiZ54wJXmml3LDxSdJxCrM7r6qAtjkBjf/5D1b/8Bj3uj+5h6VUrkVFGlKf4uTghngnEgkUyfFEaVhF+j2rJFFbw6EDAKxmLu6ursIB3vWOXycRlSZuD2GnFg6/VKoKeIJwqPN7S16oXl3
r1Wjt0WkAcvgQQkEPeP3NOm6hrlz84J2yz46VozZ5wBPEqNvzuZsZdQTrg6YaqRiYiERA8Nd4QwRkDixgw3JDcIHH4UHu3uQQO6V7iDksegI4E4pveM5FOBlo4iNFo8r279070w/++rzv+rBo+ZkZHKBErK7Eg/WKMz3VmAFpMt57zVyWqAdAAU7bazRnl2JG5kx8jQBoMxiza4Di6liUKIcHkj11QmSJXDEdDXDHaquZnE9EzvQo1fIQexiDCJk9rUQJ7hVqzBnTghPGKWo3sduzImYDOquaT5C1dREkrMX01YlwiOkjiWK5FnYDyznTDCFWtXAXyCmLIkg3bPU7OJmphpJRJp7arMbG626tp7t6YMaNQlCTO4kEuHHUdn5FVL4DumiexcbGVTEa6/EZUZk43ySluoAQK06jOWU7pdI3o19/0AYJf26ON/M57EQQ1Mc0mrzCfRsSGZG2EriREuYg8JPEWvEzOY+yFpmGuz06FUGicjhIOEdjWtSngHUWpdlWZal1jXSb67r2mCGGWiU7UrN10Q05UNKoYuX8DXH/iGcQxEMKDS0wPFGYb6nXY4H941DsK6r73JIoNGLtcVcdbuX70wXnR3WzzibrVGfY/RnSi3PdinnbToBg9oR6um+SfG1NNL/dsT1MbtG6ovRwnk+mEUxBRsN1h66EFhxfGsEFzX3sq5rWYgoss5EtVBv1cOnlNKUZ2Juka79QLOgt3aOCT9NU2TiuVwu58s5pZTSg9P11dXx+Ojh9WE+JKF1XRtpQUDuVd2sEFFZzgautU7TJJLO5xe3t7eHwymq2bIIqUetrchReeDUeIPua69K3wJNJYWtodbqgDDmKdml1lrWUrMhgUGcOSdJv/Vbv6WSmPPhjTd+50u/CS1OzpKziLnNwre3L46Hw4Prq5YHr6wAyTTB3YtRAqUED19RZNlFXTUfBCn/k3/8j3/wna9rOZ8OpyzwqgK36rc3F5A23MMyTZnCxuwopczzETJN07RelnVdoapqynmSRDAzzcfjP/r9f/gv/8W/+MkPv3fIjXybcybZ2EfD4NXz4jaaZWeQuvfUl/GvUlORiWJ1t4tHtp4+CaNIjDpT9wZHFfshTrGfKt7FQizk2MCiTE6zB10uiOpYjuCrx3pfF++0CB9vNHhWe0HXZ+MdWk5/1rZs4wS6zyH4/EYmLkOoxgraJXbfrUeegc4QhBD17MpldXcSJpJwmLOru5f1qYhwIhBHyjF1t+IPHj35/ne//+WvfONrf/bNv/zuj9999+cvnt+WosUv7rHOeqJ5CHMrDHVHFevm0jaI0lUlN9UyxtrdsZbL5VKrYePlbokiejSLwb1nk/EBn355Z9p/P8d+BPen+1j/Va0N1todfexX92feYVLe45ruj1fY3e2v1vnar5pT5e5320uOud2Vzt4vrzP2v/q8gIfbCduS+QVeypch93j4zt0RtmzosBq+thlbTvy40f2kemP1BSbcL3DcXf77M/fEwva+I7tbuw8xEycCtjqErSXx3IC9Nrw4lDh52GjMW5E75pwznFLiELPWV+vYhXPOr+zTtoRfanyttTGJRsf1w4NcF7HK7butTiO3AgBtgeuuRhoQeek5JNVoALoey8xjPpu3EpXR+ykl3GVhxErqxag56m2EKksi1IHpWGpNFu1Swe0Ht7279wqQPY0qGjpmyaIt8eQedbVjWASIU3cHxisLgYhbxiz3nYNw59O+A8rupCfdH79IULw8D3/x8UpogI06/jc6+orz7rH8xQ7A7eEvOQ9HxCN1AoEijFwts9Gdt/ZQf1t+qG1YiclVRzTpfoWSEtBqCboZGOKN2BwVMpkji6HRXaZASwc8ju6B33Y9DUfUXYNUDzJ033s1aICyxJzc1UzDeGsOtabcREY6dPAdP9aqFAmMW+FOcvdwVYkIs3Ar3hZO7C3sjTra5OaE3PT1XeowIuahb7vXofGLRKGbwGO70d2wTBgwORpmPbAw3lMoKKDM2JIi7O/QSQtpgK64w2BnlVKYnKh18QBIERZMnWNWa0nEe/WxK6ANP7RK111WXi6XeZ5zzsx8OLSi1cEsDTiXMrt7hKIR0eVyae4vRc6Onqwi7iBCvWq8RT/kPAf+HP5GgOJW0fdmNoqnufs8z9HgMWUHnWMAS+pUOnev2tBgvKlHSYpam+tyO3rYZJODA42PedkBIVq2D7iY2RjcDjuNiLLI0LC5M/po/7Cd+Q0YiZn6+/ajuvEBANZ1vSyiqhGMyz11pLtPU1M0R594Tz4J3EmZHe8VMzF2l8CT1VHdcprn4xGc1qqlWE4sckgMuFpZSymLFlcTjtKFcjjkq7VeLisTHQ6HdV2d3Jov2qOsXxKRlM1MiPOUj9NcSnGmaZpqrUFsvlwul8tlgOfn8+nm+YvFbnLiTCJEmad5PjDjg+fPrp68ecj5U5/8dagmAcDwKoCX9TQlkEMNqkiZhBFBLA5KGcHfW1dLBGctdjzkdJzgQPXPf+l3P/uZz//xH/7bSXRKRJB5nqvU5dnz+ZDUDKDEOefMnGDVavPSmzNgtdYpC4REZnMDXGut6+V4dfzCF77w5pMnP/nh91SVaUup2qwY8Dm38hsjXHZIwN04bhOMOI1ot9jsx/gOCq572FlCBqZhMbmnhI37D4gYiJH6lj8WkaoOanqtGmOHXtQETZnYINDgU/VJjcENbuJORCSWmIwltlvRW5a/YKImauaPvmpkWRZO0bwttam7c01oHi9CMwBVI8ynOZgwalq1Wd2YmQ8KD4ngxIlIbi/Lze36p1/5V3/29W/92Vf//L13ny0r1kXdaZpORc8EcXI3LWtY2VSYmTkKSJubuym8lQAoa8XqOgGQFOGKIQB1dFQp5Xxeaq3hVYjO6RXGw2m299p5dzthBwvHEYq+/y3wQn+VkMK/nWOXOJRbwcXxOq88Xs+q6pWb9xd3WHXPB0iv+OH227+SwXWvlzbP2t2zAx/u3bP3tMBfpBf+khrtsHf0x21Q8BVQ/G4cWdc6XoUJ0XykPd6vZzTZWXPQMeEvaBwhXLg6rKt3vgkqaQvPsa40E4u49bCr3f0DwMDD8RKNjCCy0DLIIwugECeB6VD2hr9uLz9HG9DlGBF5D9THTtkFAGHedtVdZk6WADtt/YaOt6NfAnt/6Sbr4lsGfLOeAEDkH2myghGTx4ZmOgwNHgTNTtPb653UMtAQU0sh3f7b+nG8XegIJFHCkeBw2414y9G4TSWziC3MIBcmmEVszRhWouTi8d49J+o2eAHYfWcxQRvFlz7fYTFEM+g+0wGvlVqvm5Sbmv3LXf/KUj+/3EFoeWX03knsplA/Yg1Sl2J72gXRzq60l40WMq7dM/yPW4H71nx3J7eRHqknYh3hf6P84VAJGhjr68KBqGOGcOSnBDLreQ3C9IC71hag1WbnppFy2yntjv3NO1EZvfxVzMidWu6pZzoNCzS5gyDCke2DI3JxOEOIaJYZXbS5j4hlRDiXgEAM8mBhM9iwxJYwMs9FG7Vt2+ht8kh7kbd4mC0UhyDIPP4M7aeF97hHEQU3oOV
SbwkTCK1aCwCwuTHQ+Pe6OXmIo9Bi1GOAg8kJkUlZOECCuHuthWA5Z8Br1UALzCwShIHGeq21gth7lsIhjHxLPezeXRaqKmkKrXFdLwCmaYruvbq66pldBcDxeAzN9XQ6LctS1ub6ixDEeEdVLaWqRYqXIG3yPEebEYAwELvtSjUyM7XcHs2iYLsgqCGpufMi7m2c/astBQV1QOW+39S168F3DGNjLUU37p/ormrerGK9D7s+J9R1+tFaIU6SVLfKGWPSBjmm0Wcgoyywui8vFkmcp9PhOEd5usiRk3Ne11XVIzZPUnCWNd6YiIdZM56g6qFgh3IfJ3POZc2l2vmyno9lKipSgZxSvl2WOSeeM0uGC0GEsouznVUNrgQGy5QPWv18Ph+Px+iu4goYgYQ4sXiFVddSYDRN6TgfALj5PB1iFCaSmZO7R12KBw/p2eF4uT0dUk4sQRA6zCd1z1nmLF4uEEJiqK4f/FwrjldXXmtZV9WSc/75z5+++eab59vl6uqqOtJ0QKLLs5v5eOB5nlqggcCA4rBaLzfp4fEf/oN/9Bdf/6rpJZE5Uy3mTiI5TxOrQt2JqzlpgZpb7VyAVdSYbU4ZDtQlH2YYZJoyExxTypfbMzvWdXVr9ulmsomYihDlL9Vwj8+d7N1mUUopKjegE+7RVEANgataoy6rCA2jz1hHPRibxhKImTmeTkTh3AtP5rhDnAljkKvGlCulXC4XwjTug52NxnemDdUNrNouYWm/3n2XI8d7hHqsoyiZuDfbtV0KrYYycx71i4hIa6BlNwY13oXDsV5uWzKbqD7AJK2uV/FS3S3nxJx/9vZ7f/gHX/6jP/mzH/7wnZ+//+LFbRGeE/Oiy1rMtNDUsuGpkzNFev/BJTN4k9IwbeO4wEv0m5m5KVtkETNomK4SnFUtVuh4UVBL7n9fQcQdnXg791od5lczpf/NLt7/6h7u2rwsAHY+q8ZTJVDfGNtP+nW+C+N57YOI6FVZOuneb+Oytv3eAXXjmpEmb0DKoY3t1TJuussvpx++pv0vI8N9b/8ip8H9KXHn6Cy1/k/o3i//nIhG740VFJjwFUUtWkVvBfpKbDpZ8FebOAJtP42VBqJ770IQgInuZB9oG64N7xFpd0G0txh+3wa5QERGHjGEvunDBECmDDWCoee1N7MSo0pNK8BO18QOyO63Zr/rMEF7KO7Yc0flpIDd3sAoRqA1tNV56y6K3RFZFhyjTkkTKqEWknROcqv5LRLUuOZRaPiXqhu1rJMBLFs0YK0GN8aAUvGCRtF+90FjbUhn58mJLohEIjQcm9jZ+QBvFo4d5CBEqpLGBY+VPRjT0QwjJ5gbSHoNjZgSYxAaHXbnnG1QkIjg7aZxpj/4ryOpfuE6+ts89ksMiOLeHAunO/eilMNIN9owIcAEuwsLo9GykyqGnl2m/xwBPmMaAcPm26bTzqcSoJyBVuE+SgLGjpaHc0WtG1hDJYCrEhwicIQDkGU4bzebARERp9d5XAdoHNr10Pybut5d0NGe5NorKwKRcYGZJCV3ZRD19rfbu46F6h4Pi0hQj8wKRFxrdSPVRtBCS5m7S25Brb/2yke8mJlZt4iPFrsR0WbOJ6JgYDfPW6tp4eQ+yhKaWUqJ2kA2a/GQPmbmLWS6rSgR0VZhvJEzQ03sCMdbcb8GhziusTsZAlsYt4hEEcne+wP+FW6ypQmZfj3tyZtjXKuuA6HtF1UUrrBD68BoWFcQayllLZd1XWsNXGTn8zlau64rQOtaqGXoae47pjS0WNrRRMcFMRwRojY8LcO5Ea4qEXb36LTIKRo+jf5Gmz+2+9CIRl2U1slRPqWVQQsErapFt8D0tnN4U3D3+xxtgdx3NlrvzA1uXOjNTdQawWCRlNIsAI7o+U2ZuZMMJURk1ZbItbdzLHg280aT6/YeN1pLef7s5ucf/LzWSuxvv/3uPOckkrMcp/nRw+vDYb46Hg9TTikJsaQkSTKgtVkoTlfHab66ubk535bj8SqQRfjlXLwFeaoMwA/geDwKcdWVwihlngkytdqYzDTlE2m9nqbD4ZAlreViipzzWjUdJvViutz+5EenT30KL17cfvD029/+7ve+973lfPnud7/7yU9+8n/9X//X/+f/0//x7Xfe+/jHP3E8XefD8TCfPv3Zz37qM5/7l//tv/nYxz529eD01lsfPjx42GqV5ZzoBNWPfuQjV8fT+WYVkaWUy2XJOc+HK+cKT25aa61FYzMmmIiUy6KNTeHny80Dra6m57Ku6+nqCBZYyZLapO1rfKCyeOXOj2/alZl5r5QAJjVTMxImYdPaUk+DAJjCNOxxTWdtWIuaz40FqsV6nbHGePdN3dnN7W7eJgrv35g/w/LSf+7OW9HUuI+qltJIqtSCVSrtZILvEOOgSG0uE7i7B+Dslpq2McTNx1qImU9EgEcQdV/IvNEEcATCjW/m1oK6YF6Ns1DKgU1dUWo1s5zA+ZTIfvbTd//8m9/5yp9+7at/+uff+8GPj4dHazEYL6XWYrUqQO4WeYMj5F1EKKWWHL/JJ3fyro15VKsjBmAOhWqtVdwIPJjh8RamqEW17jTUIR/i9t0yib/pwa/5fPf4W3APvowJ70C13afdZ6LutXv5bvcv7kcEAeq9KwE0ozmAvSuAxuzlXUte/nnYyV8HC/c/ecXxCxHs7v53Tr7sMIwt/f5YeAO0rzruZ23ttentDjWxhxfuriZ08kv8+Ro35iht0BT84Inut5utaQ0T7N86wsQAMMGlPe1OXToPKMK9+AHQqkoQkXWsGLcl7rkMm2fAA6gbgYlNHAaCO0HhBifbkgL25drcIK11dymvzLz5x3rvRUGnvQzsKkNT7Ruua/pkj14JQeR07ynEzrInGCuG2sAtXhDSI/fC2Ri7fNPfwq9BXMrIMRrd2yAcdv/xLuGNt2m0qc9NE2lAsAWvxR3C4xg3CQi4MUPJHeqKVkm6d0jnhLb0KdRvRQQjJ4fzfUTXupC3v3ZHY4q254aICD/hfnX894TufpVjk3vhAPbGwWYgajkoOlGWwnjZ0iXaHhPGGbTCFWh2nBiO0NhBABmMt8jnZrpy7KrldQNZD9U2bz6tcdh+yQ9tgZLAQz8xd0dVVRVqvovY1k2VhRAWTNpWDXd6XefC7SxBr/HiTNMUWTDJsWcqRWF66sDJ3X1QOgOKOTRKRTfNgNZIFxkz3z3S/ARUQ1MqfCj3LCmjW+V7Y917lBp14/TonQGlRCTyD8StmvOt+Vh3JkkLuYOuLSHsAV0j2vLstYA6kugo6bRDEWFO3ou/D0hGTSoFsNmoaPFvsPI6/tYuRjxN4qrD8e2dTrkJggHNiZg5qKFD92JmkGWW+MXAnEQIR2VZ1TumH1ovEakWonw8Hs2jmt86mhcoJaWsvVg2uuII53v7btRjHPffu1aGlMfO8BPesEGjdSNma4pbE453PKUxeYbSMJRmszrO9Gc5dsb71gwz3U0nIhJiYhQyMyul7Cjou9xlTiPNDIDBbSbC8XgkdmpV04gohU1hXVciaYlqLAwEgKWWBXNTrVpjWi
UDZ4JMeVLV58+fn89nyl5MrdpSXshzcqgQTykfT/Oc8jzneZqmaZpTnqaUUnp8nQh8PB6TyOk0XR8frYVuz0/TdGyMPLNYj5H7t5zXR48eTdNU9byuNec6nU4ppbVcyNxhItJSDFc1Cj9fZUZKLIkzJ60OgVXlnMu6Qi//6r/7bz7yrY9dLuvV1dWbV1dPvvgpZv7Mx990J2D90KNjsgdP3/3pD1/cHq8e3pwvruVzn/nsv/xv/l9M6SMf/VAp9UNv/doXPvfF3/md33HVfEg45dNHfu3jH/3IV7/6U4oyPkSlKFCTNJ3JtLm8hCmT6Fpu9UWeT+tyZvAH7//8w592mqfEKeUMAqxCDdP06PpBCLXAaTFhGuHYWgHVGK8gUQyb2Zh+kQXUDDlncyYInAdDwTeWJiQRmahq1bUVVLetaISZqW5Gh6EM7Y0Xg2q+X0p7OWBkIaljohKCXu5blgt321JDj9sA3UvZapCQddXuTshKCF3q4YJhTYgfjrVIna/Vz5A7KZwdx5Td1QxKcA8ihQKYTzOIUOpaFc45TzEKt7f2/vvvf+tb3/7Kl7/6ta998+2fvV8Uc37j5nZ1o+rVzEE0HQMeF0ICYHDq+pMBUEuZOQlgDgubWnAZDlMGXYidqBn0YO6qtZZW+BESJrx1rd63Zu9qWvuzZ155WePxPTdshJRsYumXwHW+V8v+phrV3a393t1eR5UMNaVrRRhgoynad8HVK7HT626+uf6I0BSRvQb5Ui14onGHfS3pcCv73affV2fbq2z/bsduk3q5wfdueO/kr3i8Asnr7ubhhWj9PP7dBxYS7qHKu+0kod1LEAC4MxtaoA7uTICXUXH3xO3CfBowcOv7KQEMArM5uWvLixmAKzChkDRST+RLai/U/AMA1KuZpUAQkamOwSytjvaWv8qJiGQrSLi1KvhvtLXTW7K5TQ1Az7xIRL0i38CKITzNezywt7A+AoEb0QOlmvd7GrzhpKjX2uRjp/U2zXv4EQPjSbRTEvURcneHwRzMgwlHHZJh6JH3BiYcISltodoD6hrctY6dwrnDO6KggJGFLxhG6GlO2jbX5KR3SAxB+BzBTBlRLRriGEX/WiO7GpOwJ5Q2VyN3x9e4fMy4v7klq3X2K88z/2r3Dw3zzoIadj1ih4EEFDVUaXupbsny9nnDyXTX2ERk/fW7XRjG7dE0ShY5tpncdGbmSOEbxs02amEiaNGnDMDKur31TidBm59tu99DJMTs7JTymKjETGYjvsPdCc5MLGkgL+w058bM6ncY22Ii8tAygUhdGgCmmmmUzDCr3vIyiTsbacvg5OFkbON4W18IZxFp8IlATKOIRSesb+NYSo12cFTwHuoR8m7Hsj3moe4pGm/xulkVo7KLSd6y48BkPNa7CAiu4HCO9Zfd4uLcXTj33ve9jT+gy6BvOTs1B1cLInJ35kRE6AVDVLXXG+RpmjbY5nVdWtb7KGLmrXiID8RltoFS2gG20BFHtJuImGkou0DLXREK7kinYZ1pNk4CuLm5GQLdevE6Zj6dTrRLXLZbMLTBM7QEdL2pMRHbCPY53TYMbyebXF4W7fN1syYAwC5LBwPea9028z/Ie1HH/vPhitxjex5v2pX7NuOqF13V3Yl9gIpxB3dlTmNEHcota9yeBNVNNdjCw8byCw+tqjJRKOvEXMwvH7wYW11mSbklTzqIisijR4+E09XV1YMHD549e7ZcLvPVw5xzzlHUVNydkQGczzeXWp3ZmUGkhmWtBFvXSrBJEgCnVhbFoediAFLiy4Js2cmUHIY0CwsDQtCv/PG/+9pXpvP5/LFf+9iz2w/efPPNnPNvfPLTU87f+fJ/+MKnP375+Ecev/Hm1fXjB48e355LPl7Nbzz4H/1n/+mq9b13fvK9v/zBl//yu8/ee+fRafrmN7/54Or4uc9+8iOf+eSHPvTmPKVaFiLinM/ncynL1fRAVbWawUlYQtC55ZyXZZmOUFVfy3e+/Rfz1cPT1YO3PvobRIScWt7kdSWiy+UyT8l33t3G1IR798j5lg+mOfPH0k4pIk5rUxCYmdkMqjHdvPvWmvWn1KXW2d0Bq7WtTWpUiJ40rB+D4RzPGnrSmDbWy0h0i1U7iQYIqYvlDccOhBmPiDdDN5w1g19bRMR8R2KE1hF/1qpR8WIIWNUaWfLGEk4pGTU1yMyWUiIvonsFPHLEgKwsbopSFC7zfPSant/cfPDBB1//xrtf/dOvffnLX37//Wc5TSJvllqePj2nlI0McE4EVvNS6nIpy+n4Ueq+Su/4uVeEdmL0PB0MdicjhmvVUthBQiKktZS1DrxHRGZYq9Zqo1K0RQJ6DhwTfsKx66NrSO67iJZ72TJfdfzSYO9voFTd9eBtmOpVoG73o44GxwXWPPgvteoVQPcVjrWXvo3e3tctBGChhwHYSEZbga/x8/hTX/IWDm/I/lWo/9+eobcD7ffGZ4tUfPl1BuB8Cbm9fpS9GQP2nj9pmxwNbu2oin3HxEA9dc02AV7yNw5CWjRmmEW8G4Remjp9DjS+Uzcb9YplO8S+AQnf+zOFYffDjEFwsBGEmnKJoewREVBMGXAmNvKobMckIiwyZKy7Aw5iplS19l2SFEagREKcnEc92K2niIjTRlBC93V0lBSezOhIop45w/sYdl9AWOCb/xDoBoswrgPqHqwwBocoIGag1xik7oJxb8UqGnrzXnswtvwMoOfeCZdSW1nhWol38B7oCDCo1ZR0D6zaZDjA+2+jAcIeWqU00/am5cTrGJzMISwko1OAcJexNiZk2IO4/4e4DwDssvju/Id3rVf/f4h2HiP7t3NsmZkIGIroqB/IfVKTI5K6NVhIkG4R2+QSYYsh3DFtmt3H4drBZX+X4JHKXhC17d/YSIOW6e5G1it/9t2fQD5I0U4AhMXh7EwMclQlIkrCjRvXhAztvCnsIBHcWy/eE/buLSxdmOiwODc/SzN8p5R4WUqthYgiKL9WrbUEO5aIWgAJNQveuK97HeAkLHwxr+Hc/RgU9xxW+bbCu0q0nexl5wHMMg89abQSLb9CM9WjraX+5tg+7P/s+U8DljBM0Xlfw6I/HEcRYUUdYoWIqbUmmcwscqhu4FBQ69qBXPO3xg/LpaTI0s41krXE45ZlEeKBzQCI5JQS2NXULLJZICRpznlZLnvlb6iz4dn3nbozpoWqmlfvMXR9uupOEeSIJWVmphQ3HPgQzSJVB3YacZI559vbW76bVCYePR/yAMZjaFS9ycSePKY9wrnUdTyLmSXd4fiNqbI5QJBqra7Kd5knpRSrWkcBiTue0vtTgnrs1vDPBNCtbk3P60zWoQdP09RBMjOzSGfNEW+vsxuCiDVNKY2wtGmapml6fHpzPV9ubm6WZWk9oL6UkmLOdKc3Lxwl3dQu5P7wpoT35urqajlfUkpv//zZg9PVo8cPrq6OOYlIi0N79OZbqnrRyiw5pdX19sWLsi4StUAyzcKmeqm1rkso9RB2p1IWg4IZDCMwoZ7Pqn59uspic8b52fn9d3+ktvzk5ufTNP34e99W1TwdH
j1+I8+nD3/ko5//4m/dPNNf+9Jvwxk37/8n/+x/iOrQM9L0zre+85Of/Ozxg/nrX/2jt3/yk0ePrz79qY8/ffbe7c2zaUpWFcwpyTQd11JqraV2Ki/gVVctVw8fLctymg/GaTH/+te//pff/9HTm8tHfv2zbzx+8oUvfO7jH/vI9fXp8vxFFOExM9ollYk5LiJrLdStGwMudoDXeMboqExVWQ5RQBVYvbEnNuTmu4SlzByxCkOOhYdwKEMxNza7bwOfyXfmEjNb1zViRGN+ppRPp5P3OiJuiGWi6i2T5xaje+cYhsD2uQUvyTi928AcLUeOhZIaMBKhwt97BLco83j0upy70Ry9JBu5M5NwihWbteInP/rZl7/8p1/7s298+esf3NzcLEslenB78XVdmeV09aHL8mIti1pxUqWF2CTz9fFYLoV6gRnvedISb2GTkTOYBe5sUbKmlHUV85pZcmJdrEk52ZFhqrtFyvTNBDs2ivi77ccbz8v9FYFe2BmDfjGZ6m9fi3r9ccc+dffoRjFIvFeDu+Es+aU0s816/YqD7v7fnUy2e7WyqyP3O2roW/3+I/Lc99+//Njth3c1nrsezj2efAU4RAup21iVLa3867rFoIDfZ5mSdb9Kf/neXWnTRPeQZ7zs/fuglWrcOeOae8G9Zb5v17a0LhsBMG7egskMPdFazzRiMIcLsWlLfh68GICc9F59yci+YGYkZG6uBjN0qnnLKcXERD1k705SqyGO1Iw7XWJowIM8uVdht4NbTds7t2qvSDLNRlv3ujsxR+5OdCmnw+Hv8F6GvYd7deckiEJNhXuUMw7CIdNmjaBm3h5T2KjRD4koinm02RrvjnaB7wyCm0EkuslM/n/s/VmvbcmRJoh9Zua+1t7nnDtEkEkymczMIqq6kKUqNCQ9CBAgQS1VoSCgngTpoQH9Cf0C/RK9SoIklKARBWhovahbjaqu7iIzOQaTSSaHYDCGO52zh+VuZnowd19rn3NuRJDJzGR10isreO7ea6/Blw/2mX322VZRgohEQqqee+JoGz/uBMRa2BB7FKVQ457e1WK7zAxu9GEjNCzKcHKQg9ycJbU0whb368q6vtpUj7BJL8bnb3JB+xzLzudsIyeZ+zRsIyzy/dq65x604sbqatTjLQKMZoCgveS2knS3oMe0iHi7unGsPbwySImoq7gFs5Qig4xbBK+FhWOYUpcdiWHh7kBzPYPYlwVR2a0bLYjyLRJgL8ixXSTFTXoJTRrjLQaSovkivEXboyuWpYiISKbmR4+QuSSDciIyuJtBS1F3m3YZyGpFhMzo7u5WVff7/Zw5cw7hk2VZKipz1OAqu91eVWtVEWHhCIYxS067w+EQjpCylJyzpLQsS87TiveIrCeNhEUlIiIJoForEYd1ztx0ZQKNaC8rDxh4TTPrNRUQlcSmaQJhKafIPNTaUobjGHhIVbpwChkF6bDHFG6ryqhvIk7oKj3jPQ37z+Fmdj6fmTnJFIG+ZTlRlxtCTzcK9KLVBpQK3KJal6UVY9xgaY/SFEmuY/EZNmUYspIoxPCxWZS9u9iHYRqWsVaHXIiFmtkgQA5cZ12vxcyePXsW+DnkEKPVWk/ng7QKmDxOON5FAMKUEkFCW79VROgme6ReoIdrqC+Lw5g+HQ7rXmIWbz+lfD4Wd+3rNnf71QLTjtcRj0DWCGbjuu254Dk1QOtoW9ropfhJ5GslmbagAhuXUtxnSLm6rVD2nXfeAVCTT9N0dXXVyj+411rLuQZ0tFJj8Ki5wgAs6on541cnZhbxNwdNKTEv5by8jxfzz9LV9U6EVMuU8243ffFLX4wbTilNWXZ5muecd1fHw22p7JLY2WDVsYBZOCUzs2qFTapGHpgD2O12ziDz4/EuUSrL4WqXSjnWemCRChURIT8dX51Ptyz5/Z/95be//c2rm+dX/8n/4+add776B3/8la98Jc/7q5lu3v3C7/3DP/69f/h1nMu/+B//D//T//T/+/HHH37/e98q9ZQyyMtSzu6Uc1ajU3N8MEAwd/Ip57SbQMbky3La3Tw/nnSa0te//vU3h+MvPnlTl/LVr37l/fff/9Y3/qsv/94XPvnkk3meT8e7lHie57YyMEUJLLqMDY6B3URi6upyiklNVgKkhejLPM8D711dXZ2XYynnOPOyLN1gaJAyFituwi2sqsfjMcjDY/WIdUNElmV58uSaqNeS0Rqs+FhOVVW6r11Eal1qLbF5nE4nf+pENE3T7WuttT652avqbrcTEdQuHyWNK46eWDD8OLxmaK+y1EttlVFjBnGkDQvnzoiOT3bX+XQ6TdMUPsRpnm/fvCKS46ne3DzJN09++J0//+633/vTb377Rz/6y19+8NGRvlyrJ5nv7g5Jpnm6uru72+2gxilNQqJ0FkqOszMWtSTUotnafD21qrru93tqeaCNpsFwBrla4lYobCmn29vbTJxzo4mGCyzvdgBK0Sm3ojJXVzfYX8Gzm2a+WnA2N45UI2J0G7oHeYYN0doDa+YRNNgRS1jaG2j0V3O33wtRrg7pEdUcjqr2SbC/aI2ctICGXN71Z9hnkbJ5+RltDcr7J7qMfa1Ous2DuGtDp9Du6BwVwFb5sYen9wefYACi1i4En7BRRB+fh+Hf9sp7vfp2a5VGgGU9zDf6E7S5XPx7fOWbzx8gw37bCgWaiMg4SfvH5bEtTvUgYBuXY07jk/GARFTVARYW9Ep9cRI3IwaBU0rhKwLAIoYWoIBJUOCqGxTTPJsqiKKQWjhUSymLlm4ItS6KFThWM0Uz85pzuVaJatLu3Ggaq2ByxBvDWPfuyHNDRCZCPjSqkoHIVAd3dOjXEBFIHc6SmDMHy98teZTVQVRnA7qizIZd3/h7gZmZo85eILSLQdL7ubvA23S/BwjJPUzslHKEHHt2JsGJOPF2bFyQhoUAsAYTnoa8lhsRCQukw0iHN04dMxhITsIgh5jQBg3yem6C9/nbEcRvGAr+5oDf52geTpIeo1pdMI0p7f39BHhvP2rEhJW5s/VFhUPYAQ+hnU283YMBCaDHIQGrXpmYkRxVrYawWad3qntLRG3Y0rt7rsXzDC2ZDpwzzGFGSZKFnr967Hvdoze2dSayKPUJDK6yWam1Mo11AL6pZRolzZdlwWXOSwpwklKUGeXz+Xw+n6IOW8SjmHmed+4+TZlJkiSCqGr4XJlDuHIlSrm7e40MHNXClAb+GfAJG74WBuk2HgOhLlhtfTALKZQBIMcqY43S2U0Fc4e5mcOnaapNEI/cndq8YZE1ewc96LTO3t7iblNKUVSte9/XdLvItdsQIxsw48zSDS/z6tW7/VfRHPOXjvzwn7VXAniIenHV6mbulZndai2k2lJXqQW++q3yOnfHENn20mitPxUDXQ8oONbwsWXGz2PtNrP9fu/ukd3k3edn1vJFtfqItUbcOOec0jSAXIzLMLLHTbo7OklahPrLakHI/rkMoDjwmHdVoa2Z6+5wP3fVxGEYDVt8rPXaN7yI+jaMRz6ubj2mOkZgsRAg1TntxgiJXYBoLQqHViUW3Etu
gKJYreScp2liToEJreoWb0enmVldUmzg6l6qoRqfK1FEv+o56akaMxwmcs6H/JNf/jKllCeZUk4p7ebperef55xTYrf9vLt5crWbZoBo2iVJKEcIO5mLmKNoiSKN07xTdzWFmrqG90JEzEI0V1sqrCP8Taql3JXbuzfVSEH/9t/+63nezfvdV9599oUvfGG/f7Kbr6+vn7x68fLmes75nb/4i4+FShIhYlG2qnC1CoJFtb+m26SuKMKyLMvz588//uTlO3kWmf/xP/7H/9E/++e4frYc7IMPPvjqV758PNx+89/928P5JCn1so1tzGtVd6q1Ul8A8aBRD2WM191+q+FosHhNzNDczZcO1eLn8boiy9Tdaw1fVVviwhU1TRMR5V76wi912LHZL0+nU0yuRKuXAcBSOtf90vs+FqXm8iBqGkjusZ5LIuqUUepekr6cLtyyDbNqjdkvK/8QsQJDeJ7nJLnWqtU4S5JcUaddSknMy+l0Op/PSeb9kyf1VH78o5//4L3/7F//6//yxz/66ccfvcp53u1vXh8PNzdXqjYrSjka8vWT6XB80xcxgic3UxdnJ5JFzwxKAgRHQxcB5WkiarKJ7goHbd1PEGYWYg/zkEhFz6dzbPmqiiiJlGS8BTODKZDMjDgoMA/HSDR+yz//6v7yv+oZHkTbuFs3iCAFgHsZbluD5i0g8G2Bubd10KNPYY/g3lZWYb1Bp2GMdtOt/ScsUyeQXsIgf+SpH2/eLjXyFSk+WWVFNmj/V2kryBwPtn78aQzV7eeXv71ovP2aN0//qRHTT7nEah2Mz43AHuqTdJnn2VAQ+FIPxjp+7sl5tZqbNi57RNnAgEW9vu2Su654wrwhPbXVLK61VYXo1C/0P5qPteVEtJVw7M41XJoUNjvRRkTU4et5AG/FAomI8n4XTomWE9gXWHXrLP0eP08CIo5CE+2eadyn2eMkNTMLGm2PECIS3bubiAJ8oouFtrjoo/Ap+pacyLv/0ojImYzaOwscDIBciJpufnDtPapHUAAJfjDg7+9Ef31Rwb+edskIAC7po9hGAoERah8EUevR/m5/wtGlRNv5ukOnozcjivdIThbqQH1RErVlkNDHuIo0+IH5eGDRnhtCRCwyYsbdZUfgRGaA0bCEO9eUtjm6IFd1V4oRvUE02xG1xQLD/sHGoiCidDgvOWci5DRN08QpSc7MbK5GzIycc5qumDml4GUJALcKBpGnLESUJjufzyLSytMAafBCFwu2nrvnnFW1auVNFChuOf6XmWGrEU89la4XCl+d8bTm5Iy+DcERmJm21LsunQKoNnEXmXKAx2EERJxPVS/cdQSE8ggHIFkBFQCQqZUQsgMgnNflSauEqEP4uqzZZ1itOtDG5qu1ABRWynh57i6JvDo7x2+Hnv542UHS2K7aw9xpPdn0p9aC8mYWmZ8DENZa3dfgKhFFTuPw0o3Txt8jKDo8CiL7MJ0HUAzL/ng8Emn447Wu20CDwdBIeWXijjmb+n+MTHc0OQntq/wYvmrV1jSw9nQdlgcW5VUjlACoXYiLjtFFxGRR2yfwoRMoXiwzU0pdyKKpCrHzyBAYqz+REyFi7qPHwLVRVphGf4pIzkJEU8rhFt26Blqkd3flkXqq1kpuDM0oZXU7LAUtrdmAE7IlNakCP6sWAU3TNGV5evOk1pqYrvdXV1dXU+IgRT/dZyKSnLMwi6BVB+DDqcJMi2mppAs7x9JzdbWn8LeQg5KjCSAZKou7miROnA6nu7vlwPLsvff+8mc/uymLlcVvbp4yy+nuTq0khiZOiYXYVSpiRzRB8w1XVXeYV3d2tbvbu6vdngUppTztvv71P0ZKIJpurv5g/kPO6WY//bN/9s+0Lv/yf/+/SymBbKjqqyrApRQxQUp9UXYjWPelxkLuTMU0wymJh89vnQurG6U5p7v3JMmUc0SMRwZI9wdxy76rtUbabSklBIfGQrwZOfdGI23nSGdu0+ar4XFryyC6d4M65IvwoHtUYTB36+Vn5nGfQxhJRKpV3lhXYwninGCm1Wo5BYBsRPc51Vrv7u6urq6ePH2+LOXN67uf/PSDH/zgh3/6zW//6Ic/fvnidprmnKXW5eXLl3Sd/+k//+///Kfv/+f/+b9hYcbkBng1JQiTUdj95JkJBHI6i4gp3E0IrmpGTCZhJzdJMrhr5Es2gh0REQmxhCQGKJ6OGL0MD7xXFkVHiQifZCiGb/bNB3GbT21+D6H8zZtQn3rFi8zA0egRMBmHf5q258ML+WMWrPXKE9tPxp2Ete4RfXF3kBMJNZ+9IN7n2q0VnxsEbm5yNdHGF9TYX/LY8Z+/beVSx109xlBt99Eiqz1P6d5FP0PYpkmyP4CFWK3de1cc39+/hDdrJ23+GTiMN+EpbqW3xzoQkROygCUd3YTFK2AikpQS0pzh0p9mrGxj8QqghW72jAMefhIfjZ2dEIZMq8pA3lZpIgI1MBoUhubW7lU0GgXCWzdyM7OJhCFNIDTULUJ4mpl94xmErMMVXeVlu3wPAIaxmA8oC9AqQRnLWvN9B1qlXjOdBly89/YunIDUwrzUALw7mYMagInof6ckMEcxemtjhDzq0dNAg48M+H4bb3N+fXajxxaCv8F2CQs3hT0fW/toXaCGJ7/7W6hN1VZixxHuBu88keH+4Z4ACG7+phboYurlICUbVQERQ7X2JdG2fUV983J36qJ3MZgAwA09RhHHN6EBXsO8wx7Y/r3aDJePP+zk7a9aDzITUfr+e38euXM556urqwjUhNJAqeeU0jS5iMxzFkwgNwUzJ8nIGUBIIzLz7d0bIorKB8HcJSKHT7sp59yLp0/MqqBAhiFeF7PfG/xmgEVaulegUERR+LZAjDfanx86XpG7N6kbRMAqUnpARKZUDIDPrYjnGgdzdzMK4t7oSgwd+Q67VutQANDpdNp2PXU/VlnO6+barI14OgOGlslqR1InxHsPGAJwaLBk47Q9FoGU0rLEGIrKGept+W4XpFbfEm4Uhk54iwIfanWzYptUqFqbY2DcQ1AfH8yfIMitxmgHbxZ4NTiTIpLzHKN2mnbxymqxyIaK/rw7vFHrjJRN2+Jb37D4RrAuwHBiiRcQRmrrxo2MEsAiAQgtIjbMNNwq3kXGu8gyd0udQKaKUFCMkdmUAkiYIxjOnLkHiMZu16b0iEl2v48B7O6cklWtYYmaRzKA31ebjOojSUSSzLBRRaBF0Rlh3GsppZbFXeNVVF1K1WLOrLGgACh2PhVZitelmJnwJ1HBfpKUUnqyS5LTbrebr/Y55yi5kSeZ55nBiXYiHpdlEIMMEZAxgFQ1ypmQLKo676+XWggyX6Wc6HQuWk7P37nJaT5JBQpsAZIkYhfVEhy/PlMSEUNB4iKRfMYEAyc3M9Onz56czsdpmg7n40TT0+fPAOB8KER5nsrpmJmeffX3URaZsuS0nI9Bq45RJ2jFNk11QKxYb8fUs6Yh7LH6xetIiVigpoS0nQgeAgBdN4ggFOUoew3Prd9kYA/3yJrjGMCy1uxZ1+Txw5jp28XNzJgz89B8W9d
97sTUbp+055Wcu0O9eWfQxZM3c61dQlWJPMgV1CwGMLMzqWotymxrqFP4fD4fz8s8zyjmlI5n++lPPnjvvR984xt/+ufff+/ly9em3csu9Md/9AfzPP/y7idf/ep0PNqUXzFk4t3pVG6uJiC5sTo5sVN2UNQYrKbkBnNTBSdmuOuynHIWb3JTzk4wgzURajJAI2cqVg81M2JXLdYEnMNAajSEcDvCLNbjlhpFfXfZLn3b7JL2Ci7/+Iwyyrz57z2T66+EGzfb/HqePlzH+Bkshv6jt8SjPqvdjy6uT+Eb8LyG8uLbNR51cZP3QZG3pME+/Pox9/i69zHh29V9hmEX8zR+ZV1uvu0vPd3o12jbHqPNAz6MHH7Kbx8iw/HhI5a1h2vx8onfEjCky+83lwjrZURlN5EQc+7Y09EiaX02EMMjs9haeAMgIpFkKCABG5MkkeTE7LUU2nhv4+Au+39BxRqan2MFi8E0DvAgAzg4zOzWFdz2fWaAhV1Rmcmp1fLqIcGepRc0on4nUYgUzDZSgWJJ6Aa0MhqnIm4yflutE3TXOOFasxFAB2YrJhQe78D7qPPIiliDlmTd4BkuOfSJPE7PDkRtAyKPSGHXim+3xzIiUUJCRNbDg2FR97WdL70Y/f2OsdKecbhs/j1olwvwvWjhI4H6QbGmUEVeoeP6200PhNYu3LVlEDR83pRpHFGhIUbyMCzbXtJCzc3pwgNFgIgsdjznLgXUCJB9PEhk+1ljr1AXWhu+DmzcLuNX6M5ldw8/r+n9mhNbNMjdezJMlPTjH/10YIB5nsM7HuZRBFumaRKR6+vrcHgDPE1TzjkYhqezzzNyzldXz4M+UOtiXdLTrCZkZnEv7hEZM1MvXnPOQ4Be3UwNcDcwaJRsRutJRMynR8CazRRTwlv1sHhUAjTEZILBFZkmRmLBNtrYZ+301AhX3SxbBV6p1TdjX/UMMDj6gKWefmbaFCCoi5d4pzIG1BjWPxr6sqG2Il3OFH2j8ksabQgnjBVWG1c4QF2NZSLWs/GOB1oLwyillGTqILkhMe1Kp2ONjlESQHfYzdI1S9FdCCMZL46PqwzqbAAtVY1BFYgxXAZxnt0+qWopJXIR+zjRrsLVbbzOIRnvukGUDY2z3dLlEF/zEywAoVDnlG6TE+KfZoaCkHMBJPIjhDhl6YgopAXZyYU5SVYrHQ1uV6Mo9r3uO/25nGHkyjFsrGpRItK+Q9hWbAkAQTybGyMCOyBKYyrBRJiqxKyAu1dNZ1eYm7dgIpmX6mWptaDrWbn7mbrG6YsXhZlTSmmeoq9SStM0PXnyZJI0TVOWlDJPknLk+1L2kBSnuHFxM4eRpJxnNYQDKCWmsx0Pt65gPsBT4ibRREQp5bKczJyoEpFXF5EkkwGgc0hRJaY0ZXcv56XU8253fTicQnyzWPnggw/e+Xv/AOp5mrRYvtrDDeW8HA9Pnz6NUFoSivE2pt52qFysm/2f0vXQQ+ul1jpNOzOttRIpETmkrWm2spRL0fP53ArAdKrzmERj6Ri3EX+nlMxyrbodrmMYhMvMzIJHF+8FADwBCJXmMa2HARFQf3WXpORLE8hJjRS6sl/GlIkztwmS5N63zhTrQ0slIHL3amq1HA4Hnq/2V8+rvvnLn7z/7W9991vf+s4vfvHLn/z4L6+ubqbd1TRNDLu62v29v/dH/+Jf/Iuv/eEfFPrwL374433+/Wf7/8Evfv6Lw5vDyxe3y2n55MUn1dyVWOaUd5BU1RfTnNndc87kuVYNPFyX4lbdld0SsUPNLEjXnMgNjQEQ0889qCLn83lZzswUvksimvc7dPAMVUxrHGNEFTBiF78yXvhNxgY/H1zhcaS1EsyrldD/23rl3i9XoYzHrrsxB1fu/WdkP17Kt3QrRO4fdfGvsF14YCFv5a/agcNbSnSvNsZAv/db8/S1YOD4VdPSW3/V4dWDHLzPbPciqMNaGGbovUceCecPf/X4h1sEf3EiAj/y0G8LNj7+fuPzGN59ePRYlveIykZq2wAKgozz4GQCUeQ2pGrgZlotgEg4nrY2qNG2KD3a1buUJtBWzzHvhnmmcHQHWXtx7kSpj+3OtUMCNzn0Rgnl1STgFGY6Vi2VzYVGfZ2IRRIR+mm3HddlRZssifdVAut4i8P6YzqYJDD1ZqknNLXJbTzN+1vY0Brbq4lHJiMHwA0W9Jm7BgHGErbxdK+RQAbQaxJ2NDiEZB54tH5dL8lvVXt0RowP1/nbVZE3YsgA1tUvpgaP/45twsHekoww9Gki+xfdoms4sQcYOaoamHXY3bb0eNnYpI9RKI0zo1Q14yi4wpCun4JmYhJCeNSMsEEQF60FFca0HUTT9qTuQM9KjcL0x7MaQvjOl1qAYl21z7u+iJnvdrvQwbMF05x2u11P+kKeJCV+551nknie834/T9MU6ZY57fR8GsAygIG6m1lQE+PO1G1gFQBb28t6HuPgNK5ZEs0qKhF09ealY5Az5FwKQYjgTqoaCjHu0E0kqvVIX/5UW+7QaEQkviqIRpTG19iOd+xR+74V0hvxJlZk4hbYMjgCazmKNvyGz5JXnfNuptjAMOtLjtsIOEHxRldjdHuYmcHAhBC7bRBxvQGizs0YDzhWQu8WKHoIcbA4xlfDPO0/adA3rtLLLSTu9FQievpsF+NhpImarVnpvgHDUYXSPMQeLWpojjcVQzS2hYFmAUiaVAf1lBvF31kktbKW3gI73SFqBBYWh7lL8OtC8WWzlLhb8NUurAFiRw9a17JVlWwvi5m0VFcjb6oMw+xm5qjUsTHdwsdzHq8JnpgtLiKAuWWmPOeYB0QETJh3AbBVFWrxd10KgTmxdCldVzNQdXdKVo0X5dO53SNzlvTRL+9iIw4YsJ/n/X4/TdM7T2Zh5Jx3uyknIdqxVBY6Ho/npaq2UHNKaZqSqookM4c7MY3XahVlUfOqSuww82liJlZVTgaDoZpBJAuRMtz1ww8/2F1dnxd9fvOuE/2bf/tf/Ml/+7/j5kc9JxFSZyHkNN1cX91cQzilFByfka7sG5/CMCu3f8cU077Oes9ZpSYCTtQSQdsETCmllEOeN+LPEbsbM3RMhyFpO64YDqNa6zDzOoRcJ3sP33kQ0eMMSSYi6iLy66ykLtUw4uTWw4xj+nezY11vO6F0qMvQyBx2d0DNghftOedYrEop57JM05TzfHV182bhH/zwJ9//3g/+9E+/9b3vff+X7/8yBsD+inKa5jlf38xf+vIX/uSf/P1/9B9+nW5uMO1vX3xwJe/+4Zef1fN/UM7LixevsuQPf/nx3WF59fruzd35cFzeHI6vXt/eHc/p6ksff/xxEs95fvXyjdaym/dmlT2FO73hFTUldfctUGjRBgfCuZmaAwvktdZlOdW5dWwDhL2jRHoJb+Ai6nXRfoN47zdzqkvrbUWDWG0dPBawenD1i8qBGmeOkNpmT+HLbhlhwAvLuZ+w39hDmdY1w6edhMiC5OZRVm3FKg2xUPPKo2tvDnPtU9omKnuhHkHNBBzI+Vc0gP0CFT/skPu9Teu3D1/H4z/Znn
3cXQPqtP7sQXvUDr4PC22wGMxHkMtdRSSch0Q9Nht+T4363eF6ZRA3xaVaPSxph1vIxxFAUWFrNUAjoYKYaCwy5CCyHoQUoksvBgAwey8YeM+qcWrq0OirKzMTBKSbJfWiikbIfo76jArjIOU1Zl+L5zSThhBFHZrrWTg4QiEq02okbiaab6WPtowSwjjn+jLoYpKud/iQm7DBlmH4bzcOd2dp07bzSFuzNhJaeNARYRgCuLOwNyQFx5iM96BgJ0f8Jp1cfx3tUxHs1jWzRgg3O0aMkyFnhdV9FgnP3pAFkbkzvKXSdh4eu1uPFvfzr6+4nSqi6qugeVy47+MVVbpQL5hCjxTMGOokaG7Krd3i3vNkV++2XowrspW6sZkL6AObegwsBvbAI2kpFllyQbEEwsdDzGJmak0vZFnOp5PXWuti3TZi4uhHA9k8T+6626fn7zzb7/dEnlLa7aYZEJHdbjdNE7Aws6TM7IfDCYCI5JyTpBDbICKpjewRPvvuDr8/hTYP1qL95h7kTCJhEl+WCGgpXFt2W6w91g2+7n2M/9cphdFLAWaIws/j4ZNurAlqO0rHDCt3i4irDS7KCpyCcOhdYqhbbxSoiYjG+xsboZk1OsemzEM8tXWGZF8INhcbJlMrXp9UVxkY6sxGM+t8yAvNlbht7olMgdnGb+OAocgyfhLHD8OUiALDAwg0uB1wL168GBclIpERSGwKNKMORLSl9ChiXcU80AOhbhe7qZnRppAGjTqN3fq/7AEHNfxgq0z//QyHFn3tELMXm4o7XLnBVQsRDU2n8YzBHSemCEbF625Pyp4c4bXsj2vildzIjSACDS3h7iwsFAVpADUFKOf86nAAIKDMQpIA3s/u7kHxDeak5Z5fqlYBc1Unsf6Mbgt0msTRWI7MnNIpz3ci8lMvQpjmfLPfzXPOCUlYhL747tPj8UxEznI+l1qNGqV5qVW1Evw8ybzfPSGi8/kYqWvEXt3NQHB40WKUTpzEPZKujZlh1cyWZSFJpfrr2zc06Y9+9KPvf/c7//A//G/OPDNwOh6X2+OzZ08gwinFShLRfu90XACJeNn4aKkPA2/SnRXAqALfx78FKvYODMJBsCzLmJsxvFOaROSsVRsvUQIaqTZJvSg0EgitlBJOkKjYPkZaOCCIKOccKx5a7l8DqzkRtUhjcl/r0Y+VodkxRufzudb6ZJ7XLQGIRxhKTvGrWBBEhFlCFGp0jsK9T9VgmzghKCGq+vr163/zzR9961vf+v73f/DLX3y4LBVOzPndd979ype//OTp7vom5+wsWvTWcSIWfPLh85vruxcfLYfDs6c3PnumvN/v//APvm4KdTglZ3FKajCzf/ud2//3/+s/efHixW7Kd0Ln82I+aSmy3xM7ddFUMyNzJnIjcmo+/I35wkwpJRaYVagGj3RZTtRcYAZ3dOXnlvLxoD2gjH5Ke2g5/S3aUp9y6U9Fg2870nnTPRfHX1q3LTqH1US+jwy9e0UvYWEwtSTgaDeo2nBtJxgU0E7Xf8tNBwXuHlRb3ZrDBBy3+ldoDxHdZ2C8cTOf+snatphwNMPj4O/tJwww6QDkwkwMHhy6rgZ7aHKO6TD0DpgACSTUYiRERAnwVhiBsxDglFY+JOA+jKGxpdqGwDnuZLvhEhGEPcj/Pa4SJvA4EkDX9ycAhuaDi9ONwUYARs2GfqS7m1vHdt6TgdpPhhmDHtsMA33EZuONjOPHSLs0x+neaVuhjs06451EG7+18eybk2ADeJxAxt5VlpgZzi1iSdxE7BBk0e2LF4CI0oDam/9uD3sbrPqVs17/xtvnXGbX+9+QIFZ3yUblOD4L4AfA0BREt8EA9hb3JvRkQoAcniiti09Y5uTka6f1odWam2tEruIHgfHMtFbqbpgWlel1iZulGC4YCqm/RzYpb3ki6d7nY4wNy5+6ZKO7J5GZeVJtkpPMDE8EWc7FfbAEp5xnkWRG+10UUjd3IYebV9Vaz0R0PB5evdJXr94wo9QlpXR1tfPTgZmfPn263+9Vdb/fP33nuYhETYjdbnd9fT3tZunlCnKWWiuhcS+7NMiqmDKeqiGPoS1ormYjsSfnLJyJxKsZVSBKNqcOetFSTXql5tEp4VBfKwQUhBOd2Ck0iAnoNtYqRdNpDEULWEYwre1nkatmxtRWLurorlZ36JiQA+y5rzAhTNL+gqMiNiP0oJk36Cku12AVM7uTGXVQ3bKYOqJbY8db1FfKEjw09PMOEBWtxf16r9Ve+Ht0HRG1qKwZUwp+Wk+9u5/4NDrqfD7HP4UvFuWMxh8mRzxCoIVaq/ao8rCPscYb21Vi2KBTeaNtVEaZN6XkAGeWxsDUCAElESZiElVVujCMsCHbeGfVokl8DRxizkRJ0hA5BEwYS10GQCUiW+uSI4onMTmTC7XaVn3wG8Bq9Xw+A+Y+C/Lg2Ji1TCoi2u+vm95868nIAKzFWskNhjBQa9VSzbwU63YDOdiKn3UhouSFTFnwSnhKQqTCJkwff3yjdt7t5qurK2IXkaurq/1+v9sznFVtOZ8wyfUVE9HptAAcrucgCBubFluWhfyUbCKRCDaKZFN112majsfjzdPnt4dbqih49a/+1b968vydL/7xnxzP5+v9fr+f6/EuMZ4+fXp3d3e9m70TDYaipoiEN6jvzT5A3dbrEYAw+ip+GyWLI9waYyNCfAG63J25iSd7bZSKzXhrA6CUQkRBuY8pPJxzm7Z6iNp0wwoRxyoBAlF2t9xTBNGrpHjzOyBo2G+Ob9oBFJMaKXHUetmMk4GZyda96WIJYuZwXkhOZvbBBx9+97vf/bM/+7Nv/eCjH/3oL00hksxgte73+6989Q9urndmy93d3dWN3Mxpt8s8Oajg2bv+/i9uX7+5ud6J16rnL33h6fF4dz6+yfN0fXW9v5r310+vbp7w/hrz7r/13/tHb968+dNvfPPp02c3NzfH43mXdy9evDBzIbYukRXpssMrFAz81fxyPx+Oo4YHzKZpmuc5ekBEog4ZwCxCRgLxX8PQ+VWjS7/Rdj+nbsVIfPnh9qh7Ub5PQYN/lfYYwnnYVxehwtYIKxLpv2vw8eJ56a0m7MCNwyyLUGcT+bj/I/nVcftD0Z1HMR7ebjp/GiZ8i2PiN5LTRYB3A3HcwNpLAZbGOhB9SCzt4k5hqsLJY5dwbwGTXtQCoMhzABp3rk3JzlagTaBj/Bftt9s7bbkG1L+ynm3IvH1f9+1sIHzhIwHvUrozGH7u7pZS7osv0NbYuPntfbi3VJbWS3bBtQtRkYtoZO/n8Nh2y4yJaYRWW2+37r2HD3vPrx/S+Ml6T0Dj/gVN1CnqSRAwgo1Bao1j7tO221V+tQH124YD/1paLzG/Bfz3V6qGwSMQjcCEgTCZYDrigY1xSmZK3jLRiCOQbtLryFszvJkANw8hwbaOp9R8DxHRESFmaLhBrbs5AjSu5eWo+dq6/IRcxFcGZVpVTd3UeaPSr6pp8TPMJEkOqOAKoADISCkRNRur1ANjkiS7ZTHVWmsnvomTMO1PB2Y8B0s5Bw2bbaFypFJPO
efXhzMQiUxHohfqllKa5xzJijnn/X4fWWfPp+vdbjfNc845zdNuN4tIWC1JnBPMFK5RIxQonueuKGIeHewgIEtyN7eaxKYpeqoanUnpdDoJszDcMc9TFrq9vZ3miaEEq7WqnfYTcmb3g+y/aFaPxyM5nlzt1cqyLPM8C6Wq1WsiYphYS82TQmdIJtDQBc3cnPxwN/dzqCQHKbIWoZVzH9DFnUAw0zaTCeYolQji7olZrc45Ael0PsB9mnIpBQ4RgkiX9ISZVa1XN++cj8dlWeL2CMLknFBK4RDEVy+lBBwvpRA7sUewpWoBXFhi8aha4WFOpZbMzLwsi6q1keBeS6hQZK0gsDtUjTmlNMdb0gLyKKoeFRGbK2I/X/km3tiGrJn62b3l0RGRmBAJCU9prjVTV3mlEepUA1C0LssConm3I6JF693pOKfMIZboLsyuthxOst+59xqkaLQXB0hSShSn2gBmF0o91wtjIjWobMbumbnHk52FzYWIFjgF3psmBKpLHQmX2mcyA1RpcnEjcyKDDzRTag1D9lTVzCnNRLSop+xEhqata+4tWrUsB4CJWDhHnJaIk+RMrkqrVhUlN6jq+XyO8HisRe4evqizmTtIiQxUw+EiRHRLSZX0hRLd5ZyniXI+M5f97hR9tdvtnlzTa7PUfbRM4TK2vM88SbVzxaLyLruREZGRQxfT6oS5lJJoKodylSYxneqBP/zJ/+d//b/6n/4v/pc3+70tR8qJGWB+fbilaTpF6T/JSJlzNqI55ePxaKl5Z0rRsqi7T9NunudlWbIkAEx5nvamYIF5LZUc2aHuNVgSp7LIlEOyd5qy6tSsH5KlhOsh1WqqxczhXMrpdFrM7Pr6SZT6CZapmeU8VVVhPp3PKRi2KR2X8/X1tXafHMfSXGvsBUtdWLDbTaUsx+V8Op281pubm8Phoyzp9nAHu0lpdquZBWpFKwmbFYdN08QM9epBwk+ZiGo1NRNJhOQmWl+oat7tp3nnJkzJkZZip0I8PQOlDz569e3vfP9b3/3eX/zwxz//xfvHN8n5Rulwt9hOnhmKi07zm3m+I6PklIra+TjJ83J4k/fPIfMLmeh6p36rxYXotCjxtNvJxLb3N1N5JccPFrmi9EzyTbKb85v3nuxffvHZ7mqC+/541qfPnh2O5fb2FkI5p3rQggLnXUoJbhXzLpdS5inr4lWdfTJ3spwwT+nKCoSmcq4302RlsVrmaYdJUM8LMqV89pIo0pHDCmCmlXa1MRg2WMuGEdszc8KGvqj2/gBsNALRal4My2NjlNDbUcr625670r9YSTSDvBR3uJqDj4CKB5DM/cJ87A/G92OB7RKPJbUB6Lysh0/HrACasmvEmUBRx899pai19+DOPG8/ubiry/M3N9wDQBX/jrpt3WDfHmMrdHzM0X7/bMbjr0EFQtPt2OQoDpkK845Rt00e6U0AgD6m8upY60A2N1APH7E/bqm/7fzWjpcWDFmVGAMeRc78yGPkqj2G4AjxP2upcEQp1BtUc7JSzMnNrDvggObAZREIw4wAoSTuqrqYMnPOmXohYjCx5EHslJwUDo8qiMwk7l7MyJfugA641uxdktxMXopqa+0eZEqxQyMyAFmgqG6gSkRRv7qNQbizuWdgiwfYnFwRaSAj4zQcKQzIGoEZKC76P9YBoKcgWkPVa8QmoGgfHtZ/zJdDvTmXQWZQ99R0BK0hagqUu1owHoJnwAQIkOACbwZbD84CaMkcb4OL/3Vttk7EAZ6jJ0evS/8s/reEp4Moer4xzuCTwzqqNsCbVwRqsCim1mg2Lc1TQyUcndbGIbOfco8kOiGBqplGWbWqaOiO2OHq7tUnZ3AiWLUKdZHkVqsqe1Y1BgkzwbRUVQdSrU3cpLl8qekRnMuZmSWncJqYu7kZPAmEjInYVGFRmI0NrKpQcQAq7MyUGEJGZzuAgAx3q65wJRAoAtPkSmFNRoVCArGwE5xgZvAesje7u7sLqRjVwszzPEcQ7GdnTNMkUxYRSWm3n6ZpcvZ33nlnP03TnESIyVOSKTFzujudCa1kKgGZZZ7naZqsqrurV7VSaznVEsoQVzdPcp6ZOSVmhGgKrp88y0KRiKWGadqllIj8eDxWO+cs+6tWddDhEWQLkoWkYAmytaqMnMASprbIWl3QDE4dSIMjWtk8UiVwvncyG2DuLT4QS1nbA1wdXrQavKgBZu5wnEtRrSnkEQFw9aiNQATmEeJAW1YU1OOHZvFtqSWnqQcWIqFUB2YAUGsVSfHmAvuNnXtFYmMGbdzVAyyNvwf09Z6F6J2JOnpg6wWc8g6d56bdE2Fmu13Th9jOdiK6efJEVZda5nmO4G+t1SullGQoOUWkzl1ERmQyZsy42zGLfJPfRUS1NuogrZOs0/9ibegirvEgQ8h6e5+jK7zzG0dIljeuU29FAiwUPsIptb23cZiZmQUKbcdTW8XiiYZ0qjMjcl/Hy40FYp7n1RrbuPOPxwvOcA9y4nQ6jUDrsiyn06nHtY7uTo6U0rzLU8oCcujVbp9zmnOeksyzqboQuwklMjDUXNVUoRXuZK5uzMnBqr7UpSiYX5Zq/9f/y//5f/Yf/8dMOL55s39yjXI+3N6JCMGFmpLTZky2uKhZFG5plOMIpsV7zHktdDkEWkQkYk6jn31TCmJ0Tg9H03ihIlHew0+n0xje0eJO+NKJoKqJWlGZyAK1fq04wLpmsrtnliAmhOpyRPJXZfec9vu9ns/zPLuru6YUMprE0njvwtM0MSZGS9mtT56/cz6ftdrhcFoqpvlq3s15N088/eVP33/vBz/67vd+8L33/vyjD1/eHQ+n04J6DVGDqXr1alprq6jp7Gydcu995UIT4g4Lxi1c9ITmI+AkbJSEU045c85v3v/kdDocDofb21tzSmlOKeWU1HA4cC0adJvg695bYeLtB+nGO52MiOKNovuYNveGPk3gfUeP2YWW4nQvsGVvh2qPts97/NvM98v225bP8+sFSBnYLDHerWZv6hj3LvC2a/Sg1EXE6dPudRP42pz/Ikwyzva5TrUu1GNZ3vDKelTB+fEbe9tl3hb5HMmTF3dID2Jrn3X+wZTsHSIg6/iQHv5u7MXhH1Q09U4igjsz+TqnEObydr8zMxJeKW3uW4IE0O4/3n3c2YXezHicfoGNKzPKp6Z2BrQ13GKh6TjZOgGeu3YoM7NH+Y3VqePuRoBdOFka2gqRj7e2X20W1C5a06dzu2LvsYeM7u0/Gi/s84zSt7XPtdL83Wi/Vk/ypi7LOl+8r0g9mN7O36LajUUYqhH9Xcd0bmXW2g/aphZldZsZYK6boGXKGLyhzchpEUhvGrnS6TO1lnXp25gWw550b2ZMbNxpn26YmUDk1dxAxOBEYhzxcI/qPUJCRu50pirCwhnupqYeZYIiJMpgDP1MZWFmNnHiqAI4iNXWpajcPUpyA6VWc/eJZqvqpZ7KoqpB9mPBbvfBbjdNU2KCu06Jp2lKia+eXs/zvNvtJs5EtJCfC+Zk19fXKcnEZATVsrNWgLuiTnlYxrQsi7nupvnN3VGE
UtrtOIXZ98knn/zsZz9L+cl+P3/hi+/e3NyonmE+BG9ieYnVx2BqTqqcndwYREzSaZnmBnWvFapMJALxztiM8gfQcPdRU50xoHm6qDE1ekUKoyRTQNNp2plF0TzpFg7QYE+FszspFXTp1MEYJnIWaKsnCxGRFGOQQo2g6uKdZdqyv8hqraYQyTn7sMTGpNruBH6/KV3uQ7bqi8ZzcVDyMMDVhnO73fWD8cLMUTAj2gaOYlmWZVmqaZDrxkay3++9qkddja6zQkRF64BG3pGwdTWne2vHBeVPVpZgs0Q3xRuHMbr95F6ncCfibvtQu8e0ja6N1sjo2O2GSrSaCOMpxoLl7qAmJxsTsxdaWIPS8chDCbZ3e3vM0BYeNz/Wjq1tHRAr/s6pxB1KonxMIhK6yx/rqynLNE2ZKXRNsyRmzjd7Zqa4Oa2whusSR3B9cm949nhazvry9Tf/3Rffff4f/fN/vn9yDVekfDzemSrg5s2LQRBVraFE1RS8KpyZEZoH7q5q3tKpu2PCCMzDMTEMPtrkna5w4uJdcF/Q2IlSSlE8cxwwhHa3SzNtbBF3jyqRhjYafVOQEG6qjE7JqG4Dzba7UgCK8CLVOac5Uke55XgzHLtdNjO1Amf3xnpl5uPR3IVlzkKcwTSdjuXVm9ff/s73v/v9H3z3e3/+8/d/+er2DiTzvJ/n+XBWIGLJXq2ajpEQ9X4u+IrobCsK+WR2dweZkwkJBCzg5CxCnJBmyPTk97/y9//+31+WmmRSgztKKcLY7XYiB/fS16XLZODN0kFEof01+nbMx9rknfusjK5GbIw9UkQ00rmd7hcZ/I23R4HKr4u1fosabfJ1tx9+5q9wHxa+BQZvMQawcek/fgm6/8/xwaVO2GciTBpfScsnb+lDsbf2yBt6FOlBDPbBDVy0z7RPP7MPP08jolFzwtdPRtsU5m432iVG4yYHntzuZyACBxVubG3DoWLdMwoiCPPQfQmm1hbrtlm43tC2T1o+CxGcFM4jB5AgUVGXNnmnfXcG0JK+iIgpOTOzE6MXhdOLnL51+wZTFOqwnseFYIp+LhzO2Hbx5sMH45EfGaGPnZXui5Teb5E0CJJ+znse83uH/7b5mP6m22aB+rRU3v7tPWp3m0QdFJI3ugF5G1eR0pUIFhHi8Dq0AcmAdolHN0GTOIqsqLEHmRu6wzrueLgyqTmbmn+EA1gyBw+F+q+25iI6UNxadKO5e3p29ZyZbVOTIO4DzEFEY1cQmHpB890cIYbYab05yC2ztNRkH1PLQK6rxgMJtVwAIqhqlI8fNxo3dPIad1wUqjByYyThT17fpoMws2sxq0IQERaqdZmmab+/nlImIg4qI+grX/nKNE27/TxNE6cIyQlnnncT52xWQ2KvqNaqV7tdyg5gKXq8O0Xyz+Goh6Pa3cvDYXry5Mn1l69Vp9PpkIUiB7Kp2jk0ymyLw5HgZEpoZUCIyMxBXpeFiZrPnsi9hCXaCAPDvmcQnMEjcsXMXTLOASggKQWmzjm7kqkJ03lZxKKehIcEIpFRyIy0MhUWhdiC8sfMZpWZU5Kw/lV1WZZ5z2bGlAx1xJHG+hija2sWj5unXu1wvMqN6byZaR3jUROMbTmQAyJSr6jWVmpdBkrhLjbr7qdTy8GD8/b8qlptBWZmpnARiVhoBJCpJ4MJcZqaJD06OtpWtB8TBn3vM4sQvAw9m3HAOnfAo+vMynZCbkHUdvCvA6DXiYoeGgf0OpAY3/ZN18YNxFBpdOQWtmo31daWLuXqXrF6K1YhFqKMHkiJcbJR912JVWY2hFjG48SvRm6kVtdagcpBRSJeauFjAcDwUJNiZv0IOct+mpNQlHBNLIklZ7iYSwwMZknKMFM/n/5P/8d/+Qe//5X/4J/8k/LmNYBaltPpmIjN68ClpuTJmZmC28sE187sIrTigeOdYqyJ6NRNbBp1J9xIl43V0huqHz4Od1szVK2Hatviw8zMuqk0GJ+0r4RTSr1QNNtmYPg6rlaHWnOU9M9b5iTHnBICw+MFxWBgre4wZpZEQX8K5edXr5dSVYSneS/En7y8/da3vvPNP/32n//FT97/xS9fvb4zYuLkTsfzYmawXRe40y0ea4sV+v+n5oQAbCg3W6deu1AUewGH5F8CZXO4Q1J6/vz5PM9lqUTZvYk5M69qOm3ee5R1uUDXzAyz4SVpfdWqemJIcz1s7vfJhu6N1Pjo8X+d7VEr7dc13X4DWYKPnuGzu4UeR4O0+Rv3vv0MMPbYybcHf/oPH8Vg/tlGMx5821f7JvM4WJdxP3LBHf0NtZHu1plp3Z30K/sOttmD3BxGNIxdGl+1o80IoFZPnRjscGJIy7aycLH1aAi4O1KJ1nIT1oXZPWhjm/JR/dCOBkN8hci7qP6990HNhiQDoqY8mq0d29/mUKaggQ/rJH7YCxo6MQ3Mxpdjr215D4DfqD+B9qLblUaHXj4TgA6nx+cPKJpd4+RTGgGhjm7unznAwiYd1x3mxIMKE/SZ1/2vYWuahRfFJz59BtFj8f+RDWrbrwgcOq8+3Iqb8wyrTygMcldHFiEzCtGCzUAZwRum7seIKVPqsLoalApmYLgJhhO278tjog1rvJlnK9HJiRA2CIB0vZ8hHEeYXkTwDBEwaQ9Way2lTPPzYQU6uXGLMJCuyCFAbbsc07ADmkCYO0CqDrgIR51x644UVQsWuBObIHrNlJwnbaWnnEmiFmdZTGQ6ne1wfEMO4ZxTypyY+YNffi8snpwTJUmJc85pys+e7+ZdDsJVSul4PAJWC11dXaUsObFhouUkILX0/J36+tWLadoRyel4BrlIJqFzKaYtCtztPyIIkzHIzc20SZkTGMQs6ktqRFEKMJ4YYD5bi4y1hbUDDPPqSnGfRD4CF/0dtXBWlH03W0sddksorF4yqwIi8o2ShMZQUS3uqYOxUFxc0rRDK44hWt2lxcdFpEto8iCUCmfzOtCgiMCb42CswxtYgnFj8X8PjYAxOQdeGsmy3viT6OhlUFmwPclASmZ2Op2WZQlA2DpVEhHxxpiI3BJvfD8RkWmaYrRvjW/v7pZlWeIGW0hkc4D10AlRUxkZ0GIL/Prr5q2rBt143c15nFO1oidVvm3ZGpG6Tagwqjj6pue9Oz77U5u51+G/ZSGmLrYB7njYACqljv4EVlHWoDSPNzhuo9pERKNSnKp6pNdTZP9GSA1S/HRWAItrSmnOJxFJjCxpmtOccn1zt59P0zSFGtRuV3OeAah/wMz/h//t/+af/tP/0fPnz3/6058eb+/IFQy3CxJyaLVqHVmRq9pQ5FWCo3sDqqWtKUAb7B1P12dic+ANAV5zCz6qqrrDjUK2lIasc6cQx698w+4Y7x1dvDcsJ+v8DTMTyfET1fau0c/Aqd15SqlUL6WYwbSxqsy8e+iadg4zIzGJOKCq5XQ6ns/AO/Ne3P3nP//w29/7/ve+/96f/8WP33//w1L19ZtjNeR50qqnjrQnTDAL7sh2YnOENZt0UJ+SZoBfeF6dXIg5OZE5lGjiRClznpBmSMZSYuKoapoSOe/yJHk
+n2v4EQBuicfEzMhCIQ3fZmtL0XPfeFXjbMw8TVMbtI5tLp+5hoWJEAMIp+LjrbNAVyF4e1ht75Hj0Y9vaYT3WyesPkox/Ft05P+aaHA99P7C9WnPMo7dDK23uu3fAjjfcvDjaNA/85hHfrRS6xUI67AFBIbt2Ffjt97820Vi3tY/dPE/Dz7/3G3kj2B9losgq20eBBjIYcQj6F4wu226QhJMADUNA1K48bRVNefcq4BvvZ/rhUh6ACQ0tOIWN1ZBsM0XrSm0/doWmsAk7suyVLfk3JBPT9thXzFkR140qLaN4LF5Gge1KdjH02YTXRHgZrDdQ4P9ZMGgpcsPN7384JO3OKoAkHWlcQYFmOEGaZo3Y3t+ARE9UEt6FA3+nW0E8c+lDtUPBzaw0HvisbfajzBAiKL2acQMW0TRoIHSomYFulkWRC1/oD4afzGzuoeM4uoS7q2foUuSUqjoW4QouHs2u222cXYPv203Pn0TKoxvk6IKBCBmj9pUAMxa5GQ1sJxVtZRC8374p4nc1UKmT1WDcUgbfiOAMmvTpXQMLATzNE3BUWtP2GmyaZ7cHWAwkepSrahDa0qpOptVGEGIjQtU1TNJraZqAopYUaUggUlVPy8FdI4ulkQi8pOfvUm9BR+Sha73H+73+5ubq2fPnolIrdW8Hg6nN8eixtN8vZzt/fc/3O2mL33pi7v9fHd356IBjcyKu8KJGI4I0Yx1JswySsRJJgK5tWrhRJJTEpFabge+RkcFQeFTVepVwsIWZGbiBtfdrZRS6nnYiOMFR0gwVsxal3BODGs4CgqllLTUSDmLaI+IpMSh/roxZ5twQLPqOzBrQjJ0MaR8SGKoMnf1V6IL27GD3nsgZ8CebW+MCdD/y0BLAEwpBAaHcHaPpNXSkEmY4D0KHSOtuuF8jgsTkRPc1sttj/QNM5M3TFFgir/H9B6wkEmsE1zH5w8fZzurB9AaMaUob9B/3hIGxg10plvrKBtsVR9hpMftDIoiqo2QvGarBvOViJpuNZFv4vZEJImZ2cwa8bR5rGCu1P5BCJ8xMwCRFnE1s1ZBwaq7L8sSuhdCTZy2lrOZpXlX1Uo9w5wJKaXdlHLOpZQsp5bUl9M8zxFRxN352bNnf/an/+79n//k93//90Xk5YuPd9NMREosxItB3ZIkJxStqtFv5o4gJRiZS6BcouYFSCJVJKeUUs4xAanTdLWn/o4xdm/objqZqKUgRvroWk+1gzSjS4+dhUaWasgr+UaOr/vXWgms8WaJCMIdZ7baWap+LrXWSi0ep30AJ3d3dicyghWvtRKzSHJ4KXUp9vLlJz/92fvf/va3/92f/tkv3v9AHQ4uRZcKYlmql6pqyHliTnaAs4NU1YXWB6FhdvXOMTP02jkypBFARBJKtgo3ZxdJ057315ivPF/Bc+iy+kQppergKba+MSzJzIopEyKI3Zzimxk3Eo0aoaD7SjhKloHdeikpAKMq3cOwcHOL4FeAZC1e9OtJNTy8yt8up+vh1R8O+0+/wwfLUV/B1g82LrN1kH/KGbZ387lzCMcpL5EYrUW6P5+J3OlP4cBuaeNAyDwOLZb2kICEp/WR87xloWZ6vD/DcfnwJKPQ0eP3+cgXI62ANpVpwovyMFTYRFMGNHKPNGBEQfo0qKTMwW6KtCGgUQViilowpjZ3RE3+D70GCRDhjo46A5qNmwCabGIil5BbdHcw2BG0MAmxbqVmQUe5iJD+ocaMsbjgCAA2N+nmug05bbqaAVf3iAfGgtx7OH7MuD9GmwOoocFLKGYPxz8A8GPvPT4pkYfqCEF7JiD4972jtnOFN4Xp1//2onXxaA+u83eyPQwVAngME45PaDNB0GsSDi+eeRtDcYBG0heI4WsC8D0VUyKyUDhfbQvEgIyba2/L1sr1kgQKUotZ42ruFpir7V9xImGYg4g3NqFvFHEHXrCNwoi7p+PyetiXzBzl4NxdawmLxNmJhIIkn5iNSYRzs+DNrNal0+fM3WGrG5uZb6dWJCCuaqWqalQYjxs1RRRaD9v7eLozM6aU54k5pYSNZU5mDBtFwxjupSoR55SZOVFyp1rVzEO/1NzhQiA1coWan6r7aRFR5jO6Pf3yxV3UZo1fqZZpmlJKpZSrLHe3p+Nhcdenz56A8tV+piZfAXc2JDAJ5yYDUw8EhCFoUU7EoMIyzZGd5o5gsILYnETyBmAPmUfknNHKKq6hhjhCbUkpsdD5XNixn7K7B1AEjImjcLe7s1unlZl5JaxGP2DEDmfu2UphQi1ViRAlqq3VhSMAxSI3LHx+bSQZmW8InymlJGRNaaMNAHQvRVwiS1ZVM43s0wvqyAbq3NsvrRVOXPX9YwxERKVfKEZUy82DOzPn1CzyiJC4t3qG6VLbwzexmriHrUz/+CqekbrDciu+UhZFrzQYz44W97eHj0OjQCLWnmnwoJ4GGHgr6uh3rqrcczb6lF7hXHR7HN5/66pl+5gRPCRic4XRCGkOcuk0pw3gxCgGMyLS40HihOdSQU4UJVYJxEzZoU+vnoXTR8iDnJymzMzqXVlHzaFqdFz0VIzgx3Ohwbe8O4pISmm3r69efHx1dfWLn//0e9/51te//vXb1y9T4vEuilaziLdLrUrGMVTdIBAnh7NXVyjIRARq3PLaHGo0daSNNSc2HjYoEjEpmqCLu7AQDRcYhQyvyFrhZ5wkfhW4uhUtZU7EHBCbORErN6p5e3EOb4pK7p0yzSxu5O6mqLUWre5Uo+SG+ZRAZC2uzs4sCqcYii18Tbur3TTPx+Px9Zvb73z/e9/4xjf+7M++/eLlSyJJ842d65u7uzxNkthJ3N1cmGGgspTk2a3NeuJmW40BwC3/kYmIJfazJhbi3hhexEqElFIEEEGZ8oRpj7xzCC3nu+PheDpN00xEzJ7zbMqTOEdOpKmZmToJeQ8Jtrmz4WNHJE77SzRvo24730HExNZdQ0Shvc8bD+uIYGzlYR5KxXz6Jw++fRAkJNCDM+DfBzT41tZU9R8E8TaG7GhrfIq6i+FTb+Neuwhvfcod+ebIhwvy9s4/9TTxP9JW/9U0RxjrDt3ezFvP9taLfMrz0jhhfxZ/6/GfylntxNetTfwwVHj/zxZUa7NZyVttd+qvDdYcN5JS+4QA4SwTupEQOvu+2cjaDQ1U5r7qvlK3EMKOAZIIkgAEC0xqIekYwgYN4gbvlIGW77exNhvMJBucvqD99t4aRYOICCRtWW+Op4H9NvyRR4Bc69zAxY9+u/brGDwX53nk7y0m7FQGu3+SePi2mKwTbSCBNQL/4Lb+brbHQoWPYkI0L4nzY6IyEcIlb+S78KeskXQigoNJwOYe1bdAIGHxuqZHRb3N2FXXpdDXnIs4m/uqF2Jm5iqhlBSxpv5/jijOFBkiRN37Q91du7V+xz/T4XyHUbOCWXIrlmVHE04iEsq8w8Dl85JzTvMsHF2pAnXS/X7vQK3VqnWbiUXE530gKyJiR80NELq71wYqrGrU7hORmRCJQJSi3oZVNRuMPmZzuGv38jDluDGoajEVRAJSUtXgShEjqORaYe
TMUkoJBFSK5pxJ5Hg6MhNKTRVEVKueliWlWmtVxutXd8tyzjlfXb/6wXt/wYx33nln3k1Rr5mIcpqurq52u0BEu8ZVEEGtXs8GBURY3GrEfDhnEBUrZSk0sXWrnJiJnMGG6iRGnZYKV3jwlvV0cterqytmaF2YeZ4m6yDc++5kMPcmpEnDaidFWD2G8/mstaaUWJgNtawmbKyB3mUn0CtBm1nArRH9G4NyLWEPiT9KKbJSZNeg8W53ZXCtpdZQ12yV36OeofcTxoCkLrukqqE51FgigIhobaGANrURWZIa9nSplZkFOcJugfYHNqKeCVb7GQYGow6hbAP4vLO6hTvNrG/Goyv6ecR7WUJ0T0zvvb4M9TxJbOKf0aSrXAIwa+KTETHevJ22LphZ1TpAsrcwyMjQ6NgY6+ZRy0CMqzGxrgiG8UTxw2XRURBvHOyrSlVfoXpQa+rlNFpPG7TFiswRyYpeazX3PE37/f5wLKPfAaiWUoprsciy9ZoB6ph5mqY3t3f7/f7Nq5fzvFfHD3/4wxg5ahp83ZECSh1j19oAElOK+FIQCAGE4yMi3gH2KNfgfEbJ0LFYm1l1DyFiZg7Jom0ipZm5A12XSDfSO2PwM3Pt3P1Y7lKSlHiaplIWbnVQ2+sIfjwLW9W47SheH4MHnJjNqXm8mcRjhSQHVZC6u/PkTKRi8EjMVNWifnfALz/85L333vv+93/wX/yXP3758uVSC5Gcz+W03DEnSbvTUms1Jwu9zgjPmp6DMBDbDRERCcf1iSI1SMSkN0StnU6aJVdTihom05xBajADE0+g5M5KzDdPdrvdvMu7eQeQqwpR0ZKyELtZNWuqsETksXNhnUFjNCp6cu3gqpRyOp28i1DEnkoIDezY3qnP7CCtXXrfH8F1WxXSz4MSfT1hx4Qjp+gecvhN1J379drfEAq9BDaXX221uN6OnIbQ6Pacb+u37efbJXSzfXzG5e4f0DytLWLTz2AE6df6tNjy2y/z+PERQVpNTMBbIsDb3tdb+mHtruh/7RIysSPIvbvTSAnpqXxEFHPDGoIhNBknMlOoVRSRqC7RUlRSSpwSKAT37Z5iZwA27iX6vCUGejBIqacRUn/RSy3iFpusk4QtYs6UM3k1Ix9afK2/Ahx2kgE8XG7NnUuN1jnEYGMLNAJD+kVjl5cWf/F7IPzyTYaGQnsa3n4YS0X/aODA7QB+ZFAYet3IgeVcQWz9+dpC1U8o3jT/ByZ8OA5+hwYv26cgwA62W2BQGyZs87CRQcJKjzRAbyqyHiHuVj4TZOQCBwl5MC47MONIeWC4m6qbsjszotJgWE2hyN08m6WGhZMj1Q4t6M0iHmVTAHTDAxtuzjDsrdfrHk+7RYapmqdEtdRSSs7ZGKjUbA70WJw0K9zdUyl8ZrqjnPNut1MrZpZSsvM5Ok5dzY2dwRPnKdF09fQJM59Op9PplLo2QOMdKsgxiFVmVsuR99NSS61GnBLRIhrWPzMLyAVWI9fZQDjVRUQCnQMk0mRIa10AMCdhBnso8YF5OdfwPddac5qnPJnZJhbXAybmy6JEXEtca4Kn46GqVjO7ffNBhAgkkSks6g3u5nme33n3CYBpmvb7/W63S41Tt+x2O5GJZ4b7qVZ3ZU4yzaW+IRICtOqipduyspxVFWYuyVPK19dzUNeIyRxlOU1T2k1zKefz6ZhSIhjBVGugRIAJrrVo43V6ELEosvJI4cTMpZRQrxERR2QG7iQlFgBRcC/ieEREET0GWswWHtw/BdAriOjxeCSinLNI0i7QEhMs7MK7uzvvSapo2IOJKKXmdxjjldiJOZCUe1M3iUcbQzxGTsg8jk/CMBQRhVvoMTKpmy9L/DCzeAcwaYUioaTSyhKE5T0shmFrMrV4EW/2PKZ2MLPEXhWpngSRdEGCHdBrnuewVwN1dOgSxQZHuG9F6T3/07YTOKzwMautlxTPOZ9Op4HTtt+GQNFISaUm3BqVLbJkVlW3rfGk53MjTIpIBBhjZVFtlxvjalkWTX3xCiIfObELk7NT5E6TcyKBEPnpfBDZuzv1DD2qDBK3KSXePi+sxlidUz6fTrXWu7s7EZnnPUkfM+Db21uDz/PcCNKUitbeUTI6bbfb9UHYpF9yTldXV22pzbmUspyP8Xe8kfP5fPPkSc55WZbwWPGaWjnAZ9NwYebj8ThC05GPGqM6xmu8fS3FE5/P59PpdD6fOhBtWCdGY0SqqbvlA9VEjuKidZqm4/EYjpuUJicBFXNOu+yG4/EOlHOezLku4EyO+dWrN++99+f/7pt/+v3vf//jjz9Weu7OVaWUooaU90RsBHdjYQDqBqdSIssuprxLTq6VSPLE7gdmVvV5Tq6IsR0yTpkIRDnnqksGuUFEjsuSZaeq8z5Lcm9ldMnBgrm7nHie53IuIrQsy26eS2V2LMviEM4Tmxp8l3NKKfG0LIuHkrB7VWXm5XhaliX3Ns+z5HR9fe3uu/nqYBUiqBUTVVQK527UlYqaGK5GcCTa0Gw2zThIPeTwESA1B8xWJ1GHGivx9D4mBAa59BLJcM+fumj0q1aT/o01etRa7V890gZ76V7vrV6k9YNHzjA+pE3PDCD36KXbCvvYRR/8ljb/EyvM9rYvbuAtbSTFxA0yhVBEzyny1d12Adi2WPTR8w7GzL0j26awTe1rT3H/83vtwYNcRLzp4rdbBmn7pG0fjQfantzImAJjEdyrGxGYo4AetzrsfTdX1TpUrMNPGlshD/0N6n7qvqHkZD0tv3njzEhYRLx6dWOHcIrwpBOI3HVBmAfsTu2h3FoVSDIiYh/WsXs8kTucm3pq6+FIndhAwYD+gQ/bQgVGCJPiXnQOAHem6HC2tsjnRbzo4g3iLag+3uwCBIWwqfig4QjaBhVHgcSQhQOALpEcm/z23f+ubRuBcBkn3EztwPHW0HyTpeURG4yYbSA/IumBd+8LizE0/AONEuyKmDjSXoY0D3WhQH3CBAq1BTWNRHXqjDBuWTlCROoRANwUhqEI/Rh81d0ISzus4h6WIKLL9WfNn/ckPAknFzYlgngNu5byNHfSIDWp38hx8lrdVJWsHLQVoZ58EpFIXGn2O9HJTJZyroec51BiEIQCOxNJ8baZEro72d3M5hTpc9OiFc5FbaeaZFp1QayamVdVVfOqIBGCUQUTOXG8V4/yYoCpFatmQYilNEmCmgd0Nq/npYYRpyC+IL/FiuYVpu7MUaRRHQ6Foy4uoly5xwHOdHsUkQ8/eRmW3DSlnHPqkc+nz56EXZIliVBKkRZFcIiwiKQ8EbfAgrnnSWSIixg0ykZCGEqgxJJZwu/g7onpeFzMWl6pmzM7gZn4uCzU7eB4j8yc07wsSyRSjlU4hpprYuqiYq4MAYycXAGyHsHrlF0gTWlAHe+8U2YO27rHRlYPR1e7aea+r37ENevGzNSKqwOYphQE3d4afdGNfFWWv5CeHygustrUjTv4aU6XuGhkZvZnt8v0rRHi2z5dvwJhs866O1OUzWgE8SHOBLTgyXZ3H1G76B9a1WgtpaR19ei0EbhBhiPi3zvTk
6QtGIYznGoxUxCv0csRhDZTYkrCQTsEwAyRTEQinQ1L1uvUrdbwQwtp3MnoJSJibiuLmQMXBmwzGqJngnRNHD5d29ROZBYwpwhLRsqtVnchyWmu2RujciktEOemZkaQyM8NnxlB3NxZc87heEJHxejFrMZwWweeV0EwFRspdPv4Y2CMBZqISilBKMo5m7lpEBzWsiXxXgawHMMvJWkk0moAJkl3gKl6j1f3EdKGQfd9tPuJMhXCOU3i7qHYPM8zFapVrQDEkiZJsxnfHZcvffmrP/vpB9/+7nvf+e73/+IvfvLLDz68Ox7MpuoVgJlXhTqpO6AGb5UzCCMB2BooYDNj9/ijuqVkmzEQXdQTIAf5657Z5LzUMmuiTCKZU0LsL3FSIN6RuTJzuMgZrVCQWQQnBWB3KqXItLpaqGkdIzj/AAKQt7MyMae2LcbQbyRRBtwjTkLbgun34MrDoN8jbYN5tGeqbHDgr9kuAlm/SvvrifitVLe36qb8Zq6zwaJbO2a7qI5v39aoESIe/e7XM5K3sjEttIVOG9viZ78HCMfl3nq39x92A40fOdLfAgi97xQPMLRdQr5u0K5tvFAH2FfgGBYw2i4M28bHGzBzkvD4R/WJILzESfusi72ROYTM2NxC/zPwjXe90naXRCRrnSQikqmLmXkT8ofDOmbmnMIsD7E/Q9/cyQcPxekiZBcpzdQYgNE/Y5nlTQy6wXtbe290wNqZtiLnzthsQ497jHX7Rh7NNL63VCYigtvIXqOmKNMBqWtEL9ttDiBK9+/td2jwU9saJ9yuLUTUX5OFYM/Wb0JtkQdWXgAGXOx/99FyWbgijooqnczs5C0OH26VsBLNiTkooxTBeV7zyNCikSAiDVplGGwtczAmRcQqLh5t+JiY2RG2cLOQ045T4lQZOZPkFCqRRMTOpRZVdUCVwthSVZsyCJRARLWHGhY11dKuF/kjImzMTJPWUnTpAHdEmRJLMPKYLbRa40YnoSmL5zxZdsei1W0KrlTYf+SR5aiupqqTFyJSdV2KFgNAToFBACIGc1IztchHorJ4rdUJQYRTVXbLG1QTXCOMEI2Su6FBj1CIjdsWB5vDwSAO0SA1Op3UTN1dzspcKHKbmX/+iw9Tq52YIhtqnnPOOckSQHGe5/EtEZdTG3bBcWLmlOYkUo63IDiTEbu7OtTUFJQyu8LMgi3qiOtONFGnsQXhLcnUUrL7+IgCg3GxKWetzWR3915CUEdAL0bjmDADqHfs15ZdW82u/lG3GjcoRdwtgk6qGsD43pwcIbLxX/SUwoHQAhCul7j0qkaxjeFH2ZzBL7HWNrCO8cl2aSCiUpZ2J5Hk1fBo8C0bx6D/iu/d/2aJWfufLmvF7OYrX9tKO+zgxO7dlQj33hPhplDaijo6BybsNAaKSGCLxQm05zNH30Rt4nZj7YftHgcyHHdGmzZuhpnBPWvSmgERrwgWkBTdF4WAq8KEVhq1sXY5N/eTw9mF2AyJPXpg0vPCklJmSW3Ljw4vpvEymEY6f7zQ9seW/NnGQw95UXPg2vbpRqh2DO9Y69qLUQ2kUUoJz3i8k8EyjnkR3TckrEZ3DS8MM1uotqYUyxERoeexWCc8CrXKhGMInU6nu7vj7fEwzU/BBPBStGidWCSzGy3Faj2nnIhFVf5v//f/509++ov3fvCjn//8gzd3RzOEfOt5UWZ2hzoFD9Tdm2eJbRt/JmuieTGhYqiUatNkgxxOzYvRVgCoIneP9ep5DTfK6LEx3WIUVmZ2IvPq7jmLGcNJ0RyHVRXuJAyjagrFNME5FhxyJhGp3m4Jl4M23nuowkY/OtxMmZO3JV6HmnyYcveYjZvVYMzni/L1m8WtP/X9xKwLcPhIbMf5ETP+b6rdu/nHr/4bKGXxeZtfWGZvXUjf1rZT5i2HPC7/8+mn7W2IXQ9i8gZrreUoNvezYXB95tkvN6PP8V4uf/spX/Y/vBHhLuRkNhlTwDaHDW249j2ovQAiCJHG64mvcFk9GJEzz+u7aNOEKLaoRuBsgmXrDo6+Mbp7KNMkSvFtczRtbA5ncm/osK3EUSJjCHS1kGKI31DTg6HAgrQ+KZG3TLwGBSPea/2P6JbYNPpcYDRLn8cjuruPg/se49u44uYdxXM8fFWtUHV7NdahuaOXLgCxt3sGgVZKcLvE20fB79r99ggm7MFV7+7+oOPGizAHuqyo9wkSTIE2ugw2VnMHU6wYF2uLIUT9woEORxd74ig9TxZBQnZgdRKt+3IrsMlEBiMP8U4aZSq0BW9GMXB063fcwdgZmTm9s58ALA4jmnJ291OpZsbOTJoJLc4fvomcXhYP4QQCFdNEmSW5e56CfEfVoap1aVFL0jfDfiXiKmeimI7k7gShJCKnblvTftIgRDm424VqJmHjEnsiFuEkU0zynRUPXta5Lkt1NXKHc9Rty5PknA1Yyul8Ppd6Fk4yCfWyG0ytCgi33CeQeavYrmqq0jjca3ihpRGz9PygGssWR6EbBwVxsKXP1arG5gauxnpWPy1xntzioqd499T1M0SEpeHViAzkLPM873a7aZquXEAObeKfoCxJRKiez0AI1dRa1T2AKM71nPrJQ3Ak4Kg1qVKHM0HCqA0Q5+7BFB1rNVpMPGydVf/wcsdah9dQ+evm9cZNZRRLeBNquyBS2vjnSFEa4JyImFMbJtYcKe5opP+eNla7Yj+Ee+I/YoT0RRkW1Vt7xGrc29Z6GIgRDTI1O3epNoz1dpgRoNQ6a2wq0s9TLmbvY5Sh9V30a/V34QPJhNxRQETruqbYQMpBW42vUkrxZ3+u3i1b+jg19eDAAGjroLVktIYNYiKjb9N9cRvb8GV4dijtJeLxNuGkHrK0FGkiAIwd8ARTWNAqAA+Zzsg6jscUkSwxkJQBBbuTObFk4pZXaQSu1Zt6zMrRtVFYshHGB40nhiWoS9SyrM6zbdu6EkaiLG+qxo+X1YSUqJWmiPE2rJntq4/GPZltCw7v2bgcZdNpnSNjWI4bmOd5N18tu4O7l6JWC3Gapvn6al4qXr0+fu+99779nR988xvfvjuc745FjSA7hy/LshyPKaWWPerm7uw9fA3n2GgabdvincYDBuGcIGoh4dNkHKjndkeHQAtoP/p8a5wIsbuXUpcF87IkbdQauLMgVv427I1qRaEqzEJNOotYnBsHONZabPY5H8VvI7LaUxJKKTHgx7d9S434wGBlhwFqEadB88p7/5a9E13xoHXxi8HRamL79JbjL9rjQOtv0aZ77NIPgr2/4il/1YjiWDw36/MmV/DRPWh89TnO//j9v+2n1hERgCbK4h0W9kxC2nw7AkIXbM9Puy972+O8pQ2D9aK9NUVzBZiXkO/Ca7GOVWpWb5u9BDIINXLs5dYZ3h1qjnNs3LLOVN0EklIa3kd3d9OwHgPDhXXR9/Ze+pDYidwi1Y+cbXBbImgilJybsaFeXTv9RITIERGSnqrnRrE/CHLH6wPLhdUh3j/s6aDsgLau2Wp4RrxU+s9btQoAcIYDUajDW/fG040R/Jb3eL8NG56QAPdOQWwXpwhT
c0BGpK7dbrcpqyAAWR+L6TjmtNvmnNRMDXIpCRlSHttlC0D9mMlAi2iRgUgFmIiZX//6w0ePLj7wgQ9fXFwzYz/mxWKxPl3dvRNDCNeXN2a2Xp+tutV22w99CSAvvnB3tx+GcR/IfuTf+oFf//wXX//q+d0XXx5yGYekgJEb//pFpFjph93Hw0fvnJ0UKjFYWIa7sIZF1zx5okWIQNKYkyz45HS9+PM/+WNZNK7XwHHcbIpCLvroyfnVZv+Nx0/eePPt/+Yf/9xXXvvaRz/2oeXJ+uHDx9d7Gsdxvx9MpOQMRCEgc2A3Oq9x0SFPH0NUVVObNu7JPfxQ65ot/pzaRHOUPOcn8MiWABGJnNaFqkqxmdfM8eu4wkd9fZl/tBDfwd5A1ehIrfH4v3Pvn3PTAJzGZiklIuIQ5vC0hqQKBkoAqga1juFIRjBwjJHZK+dWYYkcoO+MZuGdAGFFRFMZZFq3evyp54vBqdHL5THtiGuuWhBno/mnQ9vj2NdDaptqJhMrFVRVtNowHoO6+SbDhK/cHe6Y6zu/y/ygD28qdtgvk8djCEFyOZ4kdtS99hSFuOKZCeEffQQAACc8iAlaBRWO4UFKpfgeUXAVq5mhpy1UFUQzVXBs5LVBDxXIA5e2bamAiGiM3gE4cPWx8Kuevqh5DtP8caoPShKZjChtgo4mRUSYcZ7VZHRzc8Mc/UZVWSVzdpV3KhoAwawNQ7OpJvCRTM7tb/kwc2apnhnB2hEgnGy9jirSRy9iZqZPZ9v98pg5cFBNLsaTc765ubm+vt7bLudRVUhtVgmevgt2WaP6+Vzf5XbexNGs34S6gpgdp9d/KjBzCA0F9pAutl3LjeoeiR1gM3POOecS+UBAgPmbAmI6jkenz/vb1vTsOHFzVFyZCJy35qfdXnff/LBn0kPf6vgOVhefjz9C41tBLEY1jwgwgSuZjNHruUZAwIwg85EBUJVnfBSRuiUecUyISErlWCM6wYbBA2Ii8hJjTe15acNFmOz46g+H49QEiLXH7x2Lfod/1qfV//KRucrvEnT9XlaGb9mcPh9/MEfVkjkiZsLkWgQGfGhDqEkKJ1nNxzQCEeQCUNP8s56eK1LM68WmumIte0xxIHhh0qYj8hZp7qDA54+JSCnVdsLMwm988YtT35EFMiRz8cTT0zWqRc5d1626VYyxL4GZh+sUY+y60C0WIQRVZU6B4yBC7YK6RlUR20VkM0sp5bR1V7QQrWga9gXRmjaQATPtdrvN9jrGuF4vW27NQiPvOT09HYaBUNvYpDQACgcg6s3MchlyFsnMbG3bNGHV3U8ppZR2Y1HNRLTrYLeBu/fO0jiOYy6l96pF27ah4WEBV/sbM+OGr8/fJKLlctl2MWOxqKqFmW7250+u3haRruvunb3sZUYzKymT0XA9WNE767OUUhNCyWmb+qZpjOHy6vprX/nNk0V83ysvyrANbQSFEBEzx66DfjxrGRcrUAVRSOO6ZTq7A8ulqa64WZys792784nv/dg/+2e/8KEPf/gDH/jg9eamH1Mq+dGTi81m88Zbb243w707q8df/9IXL5+8+p6X3npx/b0ffpVbBi3d3QgYAKA7W8AwjOPVmM/lnLuuG7dZSyJAHfYg8sHlSTzr4P0vwo+9r7349f/661/59/7C9/2pP/Nnv/61NyC1X/jCF/ZDL4aPnjy+uNycX1+fX2+//uajZnG224tAjLzqBxNFRB5WV1oKqiAhEppodQoEZoOWuvVyfX1xfef0LjPvN/tMVwiEgUFVEAhIwFJSqkGxgigiBsbGQM1kKulALUpXonLtYDUTsKmVHRCxAUolFzCOpOxum4CGbdOWMUkuxACERVXBgInkUEZ3hwsxFfEKAKoeEFFdrhwNQGYSmLu/mDK3SKRGkk3VxV0KABjXANcNBgmQkYgoxoiIqljUlf2J0G2RKmgkQFW14rcy4GzyeFAENSQSC2rq1Wl/K1XVKrvq7WJEU4HBzEasAKyiNXFwTl7jdQAZkSOzqpa+QHANUnXvmtg4KSDnXKk7SAioBkoUAlPaL4mIgEFJnR5sFAGY1cRMDdHIao1GhJvWBYxqJXZKBYCZZM2gMKdCzAwIIzcNBnCubAFgYUBUbZumlGxFTEouwuwGlWTSmhigBQqqoin5pilaIjMYShIwAGMVAgBuzdULkEkVpBRDDCGUYQQzMls0LQYkNmpwu90wBWICIEATzWoF1QBZS2kZmxg05bhYDde7GGMA3utVDHFz06dUFsvTLGSksWvHsc95H2MEk0W72FzdxNgSzUgomFl2p07vikQ2NFG1YooEpIgoChE2RzlEaFuE2rZzSBYagoEgAzOZ5w2mM8fjO6gf331YZlqsOkc0AJoWrLNKuOFhzBCasWQyNuB+KP1ivLm5+PJXvrDgXdd1Ja63Zc8xANBYpOXWsgkMowUtXFIJOTbGkAGChui+hAKGSAKYsgzcIoTddnwEcJIhmDUQurFoKCE0UlIOIG0Q0H7d0r2Tu4FfFO01taArxEVoO4wQVmihHRWqXXARA1YbW45mGa224BIEQ1I0AOdyz80Y3tengAwIBmTTZqcT0QCnXl/XYERCMGIXCZhU4LAmlwkAKrMC3WfCu1DI1/JUlrFD4REAIB/CjqMf5i8d4BAvmpnBcPwI4lwnmf5Tnzq92jvUNuc6z9OPKwC9S/T8TOvjv6njXfD8u6P0b+p+/s5v+y6P26H96dY7vtvVhNB7l51LLfncQECADKjg5QUmLZaSAgJo4VhJPYRUNeRE2tDqbfaBmYECuI49VFt1dAcpDIAMxOgy8QA26YoiR5j87hEICfnW6qDpw9cboC5+U3/lLlOEyKYI6F5WgDCvPgNc1xt1+27O9/+o1DL95unxnURrBu+Q45vAxe3rgcP1fJPL7Tms/GaGV6P9Z5+GVinR3jUATge1uqePMNE2zZtXjAFtMqYwgtpWQ0624uBygoaMiAaqZpbMAEGNjMDUO35AtaY+zEwVTADcf0ldgqxKUhOwkRNkRKTlMPHjzCZNeDMjQkNRVUN1PxhAQsJwdXMJAGiK5Kl1BRUzu7m5MrOA1IQYQgMAHnU3i6Zt28ViEUIVn1yuuq7r7t2772IM47gtJccIIQSD2LZ3kUtJw24niIWZkaAfcgi8WnQUIERVs+3ObjZbVW0jC0CMEQx2N5ubyytiWCzcjdfF402Vcsn9kABgvZo1SIBYEQ1QDMpbb70OABwwxkgBRfJ+SEPaAbYAxG6Kh8xMpZTxenRKlUPNGKOq+A+b7XXbtm3brtYL5jURack552EYiCG0YbnsSimPHz9erVbX11fn5+cf+chHhmFYLFtEZA6qallkuAGXHsmSUspZbjZXRATIFLvT0whtCwrdfr9YrF555RWisFwuQ2wELMT2A+8vFMP19fXl1fbFF1/c77f/+//kf4dA73vPq6+//vp3fdf7ddxRYAAFyaAGGJqIGfL11fVusx3HkdHWyxUZSrFhSLFZjsNgoyLEolCKMjUvvfLKmruXXzpD5qZrs9hmt7/cbJLiw4eXX3vz0T/5Z7/0la+
+kdINAXKIiKwWUC1nzZoRMTIv2rZtW8mljAXIbrY39x/cA8Cby+vlatnGu4ioYEXEnTQJycgLOGaTy5gUrRQQFCKKHLwsKSJpHGulxR3eblfSYojIxCKCim45S4ymABA5BOIq0+SNRcxSMlU1SROrBoWT8sR0qHgxB2u+HeaKGaHPRwNDMHq23uJuS47iJkCoUKmY0+NTwYQJcZLaJ2BmUDOoKlLHzEzPFQGAEUaK85UYOLapBTREmp9WP4sBTRXausERMhw5QNZdDwEAiBig1E3oUBMG/93Eyax/YmYmAGiQDUjNj+EKCBUA6y5qgAhWADMiAqppBHOLj/pK3lqGCP6tIZBfpqEgomopTtE3MwACNw7APg1mAmDISISKIJLMLLqtCFZwI1AMzADNLGIkCmBgCmboLnmK4DZZOMlMm4GKo3Iw4yQJikhRMP8evWQnc9274cbL3cMwNE0TAg156FZdSklVienq6qptlm27uL7ZIWLXddeXV2d3VoE70yIqgbBpQhOCKpQqpARmUDz4UFEV5ECeCEREVZ+GZhYXVaOlmvXVL0hnX1rw1OUUxhxPhuPSnGpxmDh/yzZV1OeURN2MlUVkHMdIRhiIFDENw3B5ef14ff7SXZxdauYyuKpKyczMgdq2bdvIjGAKYhCmguHMqpnmGFYicVBjRFRTM2VGIjQQkWImqtLv9mm/v3/3DnFBb1xEQzTVkvJQ3H2CAhw0isSY7bh7EBWA3HUbfrtxYIES6OTKCDUgnVbNvKqeidKPSaTfGqH02xi/y0rj8/F8TObsztWcd+xaJXTSph5N8koU8tPBMxUTt2Ve1POmVI0EiZAIDKV4hzZ5oOwsD0TU367+dlsJZrrmw68M577Bo9VwAJPPsgCfj+cDfvc8CyPDArVD3mo93IsKc6XBWxc8PK0mbYaubz2981wGnCqBVWAJ1ESEpuPbI4FKsNKnT/BDUmZi2eBRm0n45A/+oImIZNUiJec85rEvJTmNDA0CkiqklErKqjpesMvDuARFjHG1XiwWi5df7mOMqrrbb8ZxbJqm6zpmlHFYr9dd1wUKi0XbNsuUx5ubG7OUztoYWkM0sCyacxIRUXl8cYNkDi8FEZCT0DD2IiKSAYAJQgjMARF3w2b+tN7pkQqPea9WEDEoAXVNiLEhP/HXq7spj8Mw7Ha7nPN6vWyaCEB9v0NEUXVGlZmYQSlChGJFjEFNtahqzjmlIUza6KFpTu6c5ZyWy+WXvvRbAPDxj39cRAI3pRSAst2mYRhEpGmaUopreKRUNpsNMVxdXZ2dnRFwk4qIjGMWEaa43+76fiilbHe9gCFit1ikPt2/e3+/3X3vd3/PT/35n/qNz36mabomtG98/c2T1aJtI4ClcTQz71MahgGBVEXVCFHEikHJVvKYynUuKsa52PUVvP7mozfferRanVyOX18ul0CY8paYkfo29otmsVycfvIHPvZn/8wPf/FLr/3zf/4rv/zLn7m6uQncdPGeRQEAVR3zkFLSYRhyyjmHEAjC/RdfGIYegMKCIKoaExITccBQYZUR0SQSYxaiZyIccuxHlVxSDeOqx3cbGxenNRcbpepoR0SSq4CHmpJBDBERwdSKeAEQEUtJisSETKzh4NkgImDoAoellBpKT1RK8iIelhnvuWbpzIRRqPLXhrXDEgC8D5Zdr99A3IlxEoPBSaLpSMHV80w4O3MykucmfLs4XtioaDj7N06HLCEhuWALVcfSw4ZizHY7ODVCI7SiBlaV92cFUSKcAa3r6CoaVr6fVXeN+kkdpQMlIDJEAJ4+FfnK8ttJSEaikBkQiUUDIs0mCqBqQDjx4xUNqhwlghEhCfQKSoCKxoCe3yKDcUzgOrGAAgYGYmpmgYup1T3Sqj8DIkgWIUU2VRD1fJJyiCVPm6Z5WnreW8wwGIgKiIqIISJToMA2yaKIGAAUKwB6cnLiBphXN5cppfV63XQx53HYj9y0KaVSBkYGopzHk/VSSm7bOO5Hy0llZBDJQx4zd9HUKYfYEAO5sTIUK55LB0Rg8n5sm0SlTRHAVKUmBwiZj8WclKBiwmOW7zFmMDMzPUZJfg946oavh5KaU3Mr8resWlQzAu73+34YdjsNIWBw8R7xrIhINjPVAtAiTsLZfo+r5VAGKmCAJGpFTJgjETEHRGZlAKzHXrBAoJpz6ZFksYwgXPox5X0XGEDUkmriYCGC43Z0Bhx66fNIssUEwI003vlUf/e6iw9v4wz222C/Wn+b4d87YsL5z9/ht0eU1Ipa4eiYr+Ooylcfr84Wz8LU5+P5+BbGnAw6fqRmasCn2a2+hqmH+biIDXA0D2U+a1ANGV36C9y0iRSMUCch5aem7u2q4LNo0OZqYX3C7VfAmnl8vhyej29i/C4xodWT+LAGpuUwLZi5pxUAiWq5z63pn+mBQjioZKMLj00DRGXSeCMinOjVdmTAVl8E8RCITus6xLYxk2jRaxgEWkpSEXKzYz/hVHPOpRRTLOPSe2/UqsiemQyj/cZnvxybYGZ9v3MwUENYta7rvORycrK6d+8eMfS7fUrjyckYGwaAto0xRlRGDKHhzX7Ybjeu6t40TRuaYUwnqzullJSGnPMo0md16u2d9STegOraJBTICMml9xxYBJqz1Jt9r6rEYXVyCgBt28YYcs4Cc1+NIGLJkEcRG9brpYxjP45YRfBzKUWLrFYrItrudghw9+7dxXp9eXX55de+eu+FB6uTs5vN1VjKOA6IViSZWdOEpmlSSma4Xq+NrB9HIEqpXF9vRCRQrNr0FCPzmMVEGUlyEREKLJQsl/3Npmni11/72kc//JFf/7XP/NZv/tYPfP8nmOJ2s08pMLrUAY6aVDXnAkCBOSwbAlWzPOZSjBBTnw2DKKxO7p3dgeXyTsoAu/zCSVmdAID7UCQiPFkvxqyAsYl9c4YPfuTj3/e97//Cj/2x3/iNL/zmF7/42c/dMFGMMbQR2nURyZqdjamGHMOf+bM/8SOf+tT5+eWu37/11luph81mc3FxcXl53ve9mbr6Yts0quqojAHdg0RETk9WdjRKKSmlPKambeeqIIhhLeKpmhIRgjrXhN2ZSAAAGVwEzQIGcq1pQF40OPXj1aqgq14bzZHWLHDCzMNYgNDUCS6IUyFR9RZRe/557rea2N5VB1VVgRBlLjNOMM+yHTc3ar2wtm3rCkeGqb8OEczycaff9CtvcZzRjMywjaDxDztvTVbtHc2gMgd1+giI6HAREQEYSlFVRKaAUzvckcedmgFQLNU8AxTUpqY0cqkaADMgQEVUI1NSLfVz2XSd87433VK3ClEEwElF02V2BB2TKxFRcPkZEbBjixRTNAMzAlRvRzkSpUMRJ0FN5VypSI8wKLqsDvv9F+kRxcxMBBRQrQrJFEPEQBRbZwSgm/SkNDRNYOZ9vyM2g7JYLs7feLRerwBgEDGzrmsBIKW06lqk+N5XX9pvb4b9frlcDftRUu77UdlEJBetZl1gxmSKrOIHiMs56CSUZ4emtZlTJKgs4BjsILELBl7UhaOv+/jUmc6wo66bo9qg9y
8ggKqKaL8feUHAc8FZcxYR6/uxXaaWOyIqRQAEwEyRicwspYF7Cy10uQFtMTBoTc2wc9QEDBCQTA/XeXSgIhFwMIOc8l41L5Yx0iJHUhsNokFOuc9lABRmIobYMDNXNIzGxO6/dNTX4SxQfSfgZBMdTe0ZRHcUkOK04o80Bo7+/PgPbwvXeecJwKH96ilMCMdXVcs1IEcVj3etNB6F488iT3hHpug3M57H09/e+MNz357W5/Szw4yOvNdAEdjX4xEa9CcjsPFxXQJv51w8oUk+F612qqubVOvUMAUAVdgJj6R6qnkgHMnGwFPT2Ikq028JnEE6dVKB0btxev+Ajz888+ePzvgWMaHHFc8WtAlMDx5kMB9nAG6xAr4b+w+GM1/Z461b2jM0Y8UKcKYg0qptnNcY59P8NqVrEsmb9SMAIDhNzkzJkAMqMscFNyopowkBMzMBxM4rMSHQPdWDqYCZOD482W+JSERSHmqtwN2rhURkyKOkMpTd5aYvpYzjYGYhPjZR1dJ1XdM0rnT/3ve9uFwuVVVkH262Z2dnL7+8bpouGcauO1vfBVRHBU7+WZzg7GJPjIY4puLdOCFQC22POko2G7SIiAx7LqW0bbtarUIIABmAiOKiWyNizrmU7DcrJ5GS+/GJf1Jv/ar5MOJUJKWdF2T3fX///v3Pfe7z5+cXH/nw95yfX4ZA280+5YGoAslSSs6Sc559yfb7PREVke1mXzU5mbuuQ8bFYvH40ZOby6uu65w7wUhaBFRSyl2MTx4+Wq66Ljaf/bVf/56PfrRtYxpzyXmxWKzXJzFGJxB3HeactQiiRWYV6FogCoj8xltvx6YRxfX6/oNXXrh77+UQ10PKRFbyEEJghqHfIdPJcr19+KjrlrkfpcD69N4HHpy8cu/7f/ST3/3kycUv/PO3vvSlL33uC59/fP6IQmy6tgmY1LiJCsjNou/7v/Tf/x8AEIgC0faJKxhttzdXFxdP3n7rjddf//r5xeOby6txHLfb7bbfl6yllP2+z/t9w0Gqmg0Scxvisu0EbBxHO2J1ikjKuZTCTUSvGwGamWVx8/W2bUzUNT+JKHIzLc9Dj2J10gMQkTY2MFNDodbWmVk0z+vnqTD61qI/0hSpLNMpU18XM9W4zw/PygAwY6ZJMNSJNOYwV4bRpx9Nb3uYjZXgWt+uitmg/zlOAS44y9LtG5mYQiBAqf7y6LuFu6oBeN7Kz+4ZmIEiqgoIIAYR8Rvibw1qUl0rELGiLCM084YrA5rwsLNDqTLsVXPVvwExEDW3nSJn+hkAVCF/TwARQKx0iWoboMpMBIQR0ACqdcFcoSXopu/X67l1c2QKYCDZb2GDCKZUxBALUZUtce69z7LFovNbQdwCQNvGUlIk9hKlg0C3sgQAQBnTUL9YgLN7Z+v18lOf+lTf95/79c998YtfamJ7slwNQ1qtVqtFm1P/0ot3v/sjH76+Ot/eXLWxSSlZsZxlL+M4jsOQxnGUourtCAbWTKJBIArCNteOok/qAsTsVBACI1W9fZgdSqZzWsPLd/PJMU1sf5c62aweH2pW9YrU0Mx2u37Vrduma0NkzjL2/Zh2u/6ksTSW2CgzQ8k2+ZoggxQpBYpgKeTaPH7IuWF9CAEgABQDZgYpBtXrVWe9MSJPCQAilJJT3msZAQqxpLFfKBgU1aJaQAVAUSXGGNztSMS5w0gmmiN6zkYrzEJHTepyU0dR5rPlu6fU3ud/2jNPm4qEx7YE9R4f//mUT6m/k+MY1xu55tc8KuHeLkv+rmVOn4/n4zCeOeNwakadjgxnlxjOvHQiMHL1JyKS4kqG03E5bTdco2KqKY1qE++Fepo6s3Telw5XcvAGPAqsbxkGwvETJmRIFRlWLZv5t8+1l56P33lM2jC/41Sp3Tpz+7WZp7bf6Tngs3pOw07ZyXlz92QKGSFomWhoLgUwFw+JPJ/t9C63oxcRED1mOcExJgQX85dZZw4Rg6BHalis6Cgu1kFobdti7fbFMomtE1kpIxG50ULRDAhx2SzC8v7L9xxulpLxWHBSOJfR+X6gNgz7/X6f8ughmgucFtA07MdxFJFHFw+Xy6U/eRzHxWLx4MEDjw8WbbdcLruu8/ArxjaEcLJaGkYKSkRNiIg2DIPkLWODFEU59TnnpKpEgEiLbiX7fcqq215NENFfkxnNbBiGUpKr+eWsiLZPGwBwzl7kSl1gpNaw73MkXq1W/TC89daj17725oOXXn35lVf3/Xjn7qmMCdClqxgIUnYNdB7TKNebUsqu31Pgvh/NbsysCRGnKlDTNN2ibWJYLpfT101JCmGIHLbXV6vFogn8gz/w/T//8z/3m7/5mx/96IeJyKE+ESGwlCJizLRou03alFIKa8OhbReh6czgve/7oAEX4Fdu8oOXXt0Pad+nEEI/5H2fSikGcnFx0XXdyy+/vL26HsMwpJxGWa2uFt2qaZYxNB2Uv/BnP/GnPvWxN9/+oc99/jc/82u//tXXX08Z18uTbJnCQk3/5S//yn/6d/6Tv/63/wMQgLZdv9iuEe6PLwIaRAYoAAYl2bDfbDbn55dPHl9cXl9dX988enJxfX29eXJxc3Pz+PHjq6urYRjMjJmZsOs6TwT4bI4xcnDhjVyKiEgkIqY5AU9aI1ed5BkBQIsUAjNzorC7RjiucmQIR8sUAUy1a5fzEjqEzuZ+ub54qwWuqqopgFaRYU+8zMV9qLV7nYsOCGoWqjoouC2hkcPRoH5Z1T3gUOexSQt08ul2C+8ZNcIxegRQwvrxzUyxugPXDY4Q4aAN65+iGv5hZQWrWkZFEQTGSc3Cc7iTM2Hw/AkA1LqnkQEwBwWeSlIIHv0bKhVANRBUcZsTA1Cr2nKE7jCpVPGtmnagEzVWTUBMzG1NmAMFDohiJppFRUwDLdQUnc5QvxU11UBVNAUBCaoKgqpyLIgCgCpqZsTRmxTJsqRsZsG/TUlWRtQcmYgMSRDUxFSLmKrq2enKv5oQ48nJyfvf//6f+qmf+sQnPvH3/+u//w//4c9+9Suvbbd7AGKkNIxNRDC5e+dE0qbsKbK2XYgUEXHAklLp+37ox1TEnLSBmIqoaimapYjkmca8HbIHOuTEK1c6QyWiqkSGOve5wVSQwqlN3gM8hHqP6xwDmMMmOTpaAECBGNQM27ZtukUICDCaci6SRhYjJso5l6IUiQiKGkN1SVEtAAc1Wu/sBDBVneuBZh4XwiTUBNORZi641/e967n4gjYTNAFQM0WUJvBi0XaLhpkBdfbeUDXRzJpNIyiAFm9LnLXCzcQQDcgdhfWWuF9NtdgtKHjUlwvmijJQ75sCwhFTFN9BKvDWI89U8CZjADA6VAWPcWaFr1bf7oA8D8PJpRPIf5ci4bsPnCfIrQefjz9k49uqLDm1ksCZaofT56hYXTOY5sya6jh/VP5Q9d55Qw8F5z+sPzpvBsGqirKoVFmmqTV3Ou9qM+J0YdOb3IKCRwmUKspyQI8VGdaK5YGGPS+Db/3+PB//Zo3fLn3wDnaFR+DwHX9VOwlVD9oTnpT3gNFLhFNnBwDMi
hKmM7vLNKuIiJFB5YpOcWT9wyOy6JRb8Tc5AqKI4c23n7gcHxExIDFEDsy07YUAiCjUEgoAAAMjjGSE6no4SkRISExXu6uaQjZDNDcfBYT16SolyjkBADMvz7qX+EVEFMmIOI79OI6IWHIahsFzz/v93kXqeRhSP3zj4eP9vm+aWtJxBlsIoW3bEMLJovVKRdM0y0XbdR2A5pwfvHBvsYAYdRzHvu8RzOuQpSQDjjEq4jBkEdmPxey66zpAdTODxWLBzAXU1DgsVFUNcsFcVIuYWSCi3QgAoEmMU0oXT54Mo3zkox9cLtdEgamNoTSxK5JEcqCmlGQKTddNsJ+IAgBhQMXKK5o82Qoz5nHo+36xWNDEP2KExaJbrE6JXri5udlsrn/oB37gta985Zd+8Rc++pEPSUkKttvtSlYASMn13+HunfX19bV7nZlZ03Rtt1TB9cndz33h80+uNo+fXL711ls3+3/62tfeiN3i05+8e3Z2llICk/3eJJdyj09WL0gpFLuWDI1vLrYA2zsnZ5bzqG+Ukh/cg5d+4vs+9aPf/drr3/iNz3/pq69/4/H5zbbfjYLt8u7f/a/+7l/4cz/96vf8MdgOZdGZAREyErisnwqo4OrkdLk+fenlDwIBh5rME9HtdrvdPpnG+fn5+fn5drd79OjRfr+/ubnZbrfDMKSUxpJF5KTpttstmjUhNhz85IgxppwRsRa3zZgJ1JRR1RAJmFQVAb0lzXvMplDV5SvAVx8vGiRwCdDjrA4eLTycSKQ2ZVIAwPBA2ENEE3EZEKjRO5IZkkvUkJqaAlWtHVRAYp52IoDJ3wARVUWlviwRuvajmRGB1jZlmzEhAMSIZkYlN5PB43xJ/hynCTqBHQEQXeYHiLxfDQFUwBVnzBSLFZpQLjGYNoQ0GcrN+WMkDIQe6fuL1EJMQPD42aY/8SoNIpkWQzQ0qrkwQ7IA5LEDIymqCSACGTl9iRCNkAxMA5gzF4eKgxA50ExIRRBCg6pwLghsZkxIQfxGlayIGIJGDogIWkwyB+66CKCLBmHJbVwtYkDEmjBquhgjN5GQKXDTLtq2VYSm6X70R3/0bH3Sxubf+Xf/XVX4ez/z9//Vv3pt2cVxHC/PH79w/46WfLLotoEClogUQ4isCKwmHCFibCNKMTMkYkPOOZthkYl9oFU9bDvspgmIiAxmaioAxPV8maMlg4mP6enJirPmjKY6qp/+KS7qcHyaIbJZAUBvqDTDYUh53AQqJfWBCYy4aXLOKaWuXSCTlhExmKlB9b2Y14vVGkPNWap684SjKZ+lTBQQIlE9CBHBBEoxMGcNM6IBAodJtBiRAwZiAAAjwpDHwcw4+P7jTRqeHvLsgHd0KAFbpcSpJ2fhGVg4He+3Km8K7ttJeICLz2JCOMDC42pGfYSfKTMeMNtRQKHPtEXd+tX0xR1TjHR2Of7DSZR7Pn7/xxF4c9kVnOe2wVFYjGRoaISTn+3hEPQY8fA6ZggEBjpZUNjUve7Az9NYx0eq/9fgVlJmyi3ehoKHPIsnU56CgjDVCecn4a3nPx/Px+9yvBPww1ooqEaaNrk+TKmN42Tc3FVrnqCf2GNmZsQMJmY0WeMCARqpZDFzWVBX8rO6gqbQDgA8HK3voTX7SUcq7qoaPv+FL7mtlif4CdAV29sYQwhd1y3aLkZ20BUMOYjkJCLM3LSBAmYo45jUNAYOgQlRRJK4poONl4+diYeIhmZAfUo5j0QUYwTGZtE2IQAsT02YuW0Xl+cXpZT7918Qkc1mw8xpLOM4WrHZD1rESimi+uRyb1NrGZjMgjcNh3v379y5cwdU+74XrW2Np+vGeW6V6XpycnZ2ysxj2ntRqGlDbJbMbJBKKdw0VW7ESzqkBEgh5DEtl8vdZnN1vW9CvLzaL5Z3Tk5f2PepbRdjEuImhACF1ThE5tCklGKzaNplCCHnsVssRY0CK4galrE4HG2bZhzHfhx2u03XNQ7tVFVM1+t1vx9ijAZ65/Rks73+2Mc/8uT8G1/84hdeffVVZDKzK7kynaT8RJByLrmJkRC3+2FMRRQAQ39x/jN//+/9xue+vDpdP7nY8sXla69/fbvp33P20x//+F3E1guV+/3u0cMNIqZhLKUwYNd1ViAQg3JJCcNVzsN+GKWAGN+7E/7kp77vx370hze9/NaX3/jaG4+vr/tHFxd/53/1H//H/8v/dfPKe4yAEFz6ou7qxNAQ5AzMgHFaUQhggsinJ6cn69OXHnwIEebMour1+ZNhGG5ubi4uLh49evT222+//fbb19fXv/Vbv0lE++22QWaiPCYAQIPlYhFixMoKLl74B1Eto09OD0NdCzMYalFiZF8wiAYO0cBEyQzUvPY2Z1lmQacZehmZqg7+EQ08vepPMFdMRQTvoCciIrcaJ69XgCAgGBqimiGQFUC3vnZ0o57ygcCtqk5yiAhGAqYqjDwV/urhXVU6JTsuHabKjC9tXxTs+5ALcSIiYCQ4NJ4xEoAZIjBRnWBqhcBLYYRARM1c80EwVA8PgICpxrPsFUK/YIKJxAlCEGFS9ldBVE8GgwKoFjQwohhN0RAhMBih+38woaqiimadVVtdhKTsL6waN2CgiIhKJuTSTZUE7oDe/9lEIAIwEjZv3Wxjg4ivvnw3l9ENchDx3r07pRQOdGfZmJkhMXPTLhaLVdMtQmi2u34sstvu+zG98p73/tt/+d/5+m/91pc+/8Xv+shHf+1Xf/21176+3+6GYehi08Zmt705XQVGQ1C0AkZkkNNgAplqSRi1gKioqcvrEiNCE1iBzeJM43x0MRIRcQAgVS2sUiyYzR6icwxnZmh+fBwjk8pvtuq07p2ZR2HfNI7rdar6+Mk5kwYSK2NgTWMiot1uCLweU0pFOyDnfakVMG8aUrUiUiolXO3YeBoRCUmNAYWoamcT+RyDeoBZWYRTwmwqoKgCIgaiIMqe/ck5J3FX+lK0a2NKycxCiMDsW3rNg6gC+P9wuht6FCEqAJCRIkzthVQZ3zMre7obAIQVuXno/CwmPLY9PA4FnqoQ6q0vxdfmrF2FczHw+L/PMlqPL+wdY157Hgo/H7/twGfJxnakMur6Xnh0QHskCgai094+9Xcgsk66iTa5H6lU209EBApUyZy+/yOi9xf4e/kfTlgRqPoN2vR8PC4YwmQ6j0eUUfCjqf7wPDXyfHznB067sU3lQbq91VciJ5jMLbVTBmRSfccpTMIqIa6qFBgUULX20xgAGIKJFS/pmaEWKSJ+iqrIjApngmhN0zDM8g01llMN+1FhHBETT/Rux5fepEjTMeyDEU/vtXPvkCsouN+01+u8eYmIQnBHAGyb4NcAYKpUsAAAYFRzmYSgKmMuzGyG+2EYh7JcrZk55VyKxqYLxG3DJyfgi5mInFZXP55hKaWUVLFiTikl18UZinzj0RMDcQScc7ZSculdn8NlLU9OTu7cudN1Xc6j3zVXpmmapm3bGCMHuHv37mKxQsQ2RCJCNWI4PQmI2Dbr3W7X77fnF5sHDx7EZrUd8m5MZrP4nvjdSHlYrZYPn9zcu3fH
iHaDFmBqFt1ynXMe930aRyuy3W6bEAMzEV1eX1Hgpmn2+71qaZrm+voytivOnFJarrrr6+s7d05DCN949PaHPvLBx48fmxlRiDESxyGNRHR9cxVjrMJ9QByazW6biiyWpy+956XLzfbJ+VUpsFiF7/rAB2PX3n/w6n7UtouQNWczbC8udgbing1KtN+lUgoTmeFyuZRg7aLhGMYsOStCiM0yNidnfXn5hbv5h+NXv/b21994+/L66r/4T//OX/+P/hfG9xEx5dw0wavkHBhEoWmnheT/RfMeoSJTahJAve0HAODspQdnAC8d+grMI9M3Xvvq/+R//Lff+noKCmfL9V/+7/2l7X73xttvXd1cX++2qWROqd/tI3Ek3u/3S+4cDs2VdF80TYzemMqAgJhLJoAQggUWqWaLfpjlnC0LRTosFO/WUwGFhmfr7YmlHQIz92lkM1UtKk57ZWZkxsJGTGguQJpKcVZpKmW1WKSU2thoLsioRZh5GMZqzF25ry53EwI3ZiaaVZ36Xe+TltnHbDZYA5ibkudYfGpQbIJD3ACutsKBDBBxzCOjjeMYG46xBdCmiczcYHz55ZdLKY/PnwwyglpkTqlkSe1i2UAYx9FjCa8cllz1hEHUQEXETNqmA1AxAZOAgSctVgBommRmqGaWCBFbl/zR/X7PxE72TiUDgMWAFLp2Q0Sr1UpVVDMFRsTlsvPPy01s28Y3rhhj27ar9k7f93fu3Dk9vZNSOjk5OTs72+/3aLDf70Vz0zQi+ebmJg2ZGUC3oCgqWSGnLvXbMetmu7/e7BHio8cXZ/fuS4Kf/bs/8/Abjz772c+u75/93M/93NtvPzw7uYfIu91uuWhFNI8pxhAImSCQjsO2izE0IYuKiqoSWGREAhWTnMSmAtFEdvUSmU/AGJiIpRhARjTCoNPwfMTc+80hHDFVwDGbgbVt64m2Ump7LaKJ5NmlAqtAGSNgKWqG/X407S33XUeMBBZ2+zQMBTECoGe7QggmCmjjWH0sc84lB0JEZMgFmEvRYUgBcxODmRUpMmZTEFEVGMc9cmdmMUYk1q0NVsZRVdG9QyhYLq6zghzb09Vp4KaJC8aoUpM1fd+DSO1i4qCW/RwGAFAFdOVatwV05W8vXxsDSNWHKlPBwc/kSqklnATinHfqUM08bBVEgENyl+AW+RO8JOl/gohghKgwv3vtET2UNGql/Wmy6DEmrJTgubBoIIS+I9X3xW8uIK6EwOfQ8d+4Ub9xrbQOn3Se4awN1jUzigiEWswbFbyBxQMqYgYKfgzqzEkhAkTRRMjTDo+u9IbAagpgCASIdgRHsQaBM0alaR3VUrlzPeaewyr/Vn87LdMqwkRHk/+340s/H8/H7zgm5ReeNPx00nEgRFBQBHYFdDjCfjDVAO3A5/J2G1ERUKU5deF99SKHvkFTNyS0WYGPCJy2OUUFvjj95+PBzADqOixzYEBEQc3VaY7ODAOo/rp+qYqVBgOIeLnbeAlx6laq4HCxWMyhZIwxxujB2b2u8+c4boyRY4zMbKoF3QMAEAOAexBgkmy5REA0dhQ3aPEoJ4QYOZhSqix2osCKEpvYYDNBVlNVk+L6H1aklCyluFdEqZ11LCJ93+eclejiZieXN9fX1zCVd7yi6Z/LzE5OTrquC8RN0zAaIjYhrpfdycnJ9mYjkgkxCzXdycPHV0hpzqA7BgghhABmfHU9Ill+fBVCQLTNNoW4utnsVVWKiJiKgUgpykhZDAPf7La60aZpiOh6uxOR2CQvZ425U9X90N+5d/fzn//s/fv3H7z8kplJMWJWVXcB2e/3zAyGqWQzjKENbUcYh2H/kz/5Z//UT/yZy+ub//y/+L/euXP3r/x3/uJqdVL666qtwoIBNZeh3+/32yaENoYYo5RSxsTMsQ1Ai2KNghpB12FgzVlKHrRIE5oY4joEev/pRz/0ohjs+vG1f/H3vuvP/Q/3++1ytQAQQGAKw9h3bWdTYl4BjvORPFcFHa3NbXuVNVl1WWBip7zvIx/+m3/zb/6H/9P/2d3VSdd1f+Nv/I340kt2c3Wz237jyfmTy4vzy4uLJ+f9Znt9cfn44aNdv/dK43a7zY7kjUxxsViWWADAb77k7DIqPWSnwFkRRQ0hLJoW205KUVUpRQAIkMitNUih2OQJUDUhwYNfJLdPUY2mvk8gUYydkBRzn3ENCIUU0dq2BYCuab1OGIixwfkRAKiiLlbn8Gazm1co0dzhCEfG1rfoZCklOKr/wGRdWJIB1EwSQ5gQr4NbaNuulKysiHZycvKn/tSnX76//t7v/ePve9/7Hj169LM/+7P//F/8wuNH5/funVxd3Qz9wBya2IhIyUqITNx2LUDJORPRctEihpyzmbRtq1bMnK7pbgHIzP3+oW8mXYwheKHMEPFk1TFzjA3HEB3btW3TNJFfdkDon5eZRdV1jwEAGUIIiGhQfUfb8KKIvPzyyy+//B4tst/vt9ttTvvXvvLV/X4/jj0imuRxHAGsaZoQkpmlVMYxiVIW2Nz0V9e7zbZ/6aVXm/YkD/lXf+XX/tWXvnZ+fnl1dUNLfPz4PIRmHMehH9sYAWi/G++sO1ADVJFsREQG6K7z4v9z9R8VKEVSUeaIaOheCk6ZYCainEfPOBC5Nwc4HbmWm7H+DIiGqETeqm5Wc+qO0s1MtEx1wplE6pznuh6pasGbT+mSNRdjYIWgBmiaig193u72bduoQslKgYlInOFM5ml/VFABKd43SDCVo33LByCDggBZ8/z4JO1rABBChyAIgsAhNMwsJeec2WviWcYxj2MxY4QIFur1C6oqqYoJsjqanHo2TE0Bi7uMTIe3F64BjNBUEabOvXrfcOrKuG1dOHU61aLiVCeEQ8KYMRwKgAc6HFUtjarXf/ya895odovwdjyeLfo9pVZKv9Pzn4/nYx7zEYzz/z818byK4AbIU3veJG9G5O7b/sxaJwSeTL7ndiwScGFBp8zR9EZPtQXOfYO3ACEAgFUJbpyVSA0BK10cfM0+TTE9/ozPMeHz8R0ZBLcNbK0qLaB3roCjqmNaaT3Onp6BiDg1+XvbPE7+wuICLaA2ybkDmedUbgXJxGyT55aPuVYP03nqj9cKoSGbGRlNunzTmqdgZt4oaEZeZwMwyw6ZJtG56YMR1aZ/pxo6GimlvGUaQmjaGEKYHdUcEblMiz9/uVzGGM2Yw5hLbgRjjEYRCDhQE7vdbgcEyEwKpYiqMscQeLBtjR4ACCZ1e7VAjCAYsYkNVUnJMzPTEnEqnnq4k3Mex/HBK7Ubp/IzpWbQx90gMgAMasVE09iXUtoQSikvvnAvD+O9e3dO1uvtrr+8uvn662+eni3cntA/ZtN0bYsxAiKWUk5OVinpMCYienK+Gcey2Q5ERECAjWDRnEYpZBDaxTCWIKCq+z7FGCmQAlqu4DwVWa1WMfK/9SOfevT48Wtfe/297/sAEfX9KIBFS+AGUdOwLaJmqKpgZKgkwC3lnE8a3g+7u2eneey75oW7d07Oz8/P1o2qFElN06y6JTMPu9hsIISgJZc0imZuqWl
YKe/768cXvfOHl11HAMN+6PudqrpryOrkLNLYttItF/bk+vXXfmn7//vgH/+hH4LcFymhWw79GNo2g90ykL21uKaqNR6xTKt/xGHZOG1PVVPf/4W//Jf+7//l/+3zv/rroPZPfu7nfvIv/kW8c+fs3r2zD7z/46ZAlZYGYnazOb+6vL6+fvjw4cOHD6/OLy4uLp48eXJ9edX3/Xa73Ww2br+pABQCM6+aZs6piIjmklJKKa2WS5st6dXmCcaRnPtJkxG2h++B2A9EIQIF72UCMSKX6DZA9b+KYJ6j6PvdsluAFoQCZoFZVQNPXfnOOQckDMh0MoniKFb1lKmdA8Bb9uDW3VOdDCSgbitT6UMBAEsBI4Dk5m2EiGiMtlotRCSEjli7rvvkJz/5vR++u16v73/0/a++evorv/KPP/l9HwH4SLtcfv1rb3zpS186P39i1hFxbJgpmhW2XlVBEwChLQmJsQjKdpecst4tWmYm8nRSXDbvdY/TZbdomoZrucPW67V/KVXzMwYXq4xwAgBt26q6UQOmlChwzY0VG0qeVIKUiC7Pv7Hb7cwsxnYYhquLy2FIRHTv3p1A3LbtYtF2XbdYLBCBmYkGEUMaDTgnEysUYoxN20I/lpT3Rejttx6n1x/ud30IYXuxbds2hrbvh5zzom2JOARHp+S5KmWIkQhBtcwQndAUsO5RWcyQmUlNodIfgllgLimhmUzJLAMBQAJGJODalucIhwgioIBCZQc7vxidQaqqNSuA6nTaeXuvh82UwTFVQ8hZchZjIEAVM8CcZbsfNjd980InSmPOi9AQhWSJDslH0+rsWnIWjkFS7wrSORWwhGiiRaGYeV6/5hyx1pgFjJBYRVSBiAiDqwAzEVdJWTTFJrREwfvPD+BnQr+mCKCuWwvqhhqmoOb9i2Y4uy+CABCZ9xM6w/oIFtZKWi3QIdKBjltlP6c63kHB5QiMHV5qJvEqAJgd7Yq3Og+tUtFdj7GWB2/3UHlcjP61EuLkSFj3jAO8fI4Jn493GnNv3vTAkRI+TAeyIYgZg81cG/S0J5Mz36wyluup58R0MKh283XF1Pdy+bRn0OAxJpx53QcKKCLXol/lVLsKjkFd9XQ0w58JM56P5+M7MaYkOwHYlIaoLA/Pwx7CVsVJwtqeegVEIGY0AC0wdSWB5yXNwA2v9IAecUrBQCWjaT2kAIhIJ296m4rzTpCEmnk5yF4AQED0XCqYeqqGvHyBVAWBGZ1QVC96HPySnl5RNFVyvM6ZkqlazrqXgZmJRvSz6HBN6JxXAGBmZ5wCwJ17Dva46zoGVIWu61arFTNn5VYKonctYyAwA/GGNEKjWRdBDUyLfw0KAFwOeW4tlboAQN482XbWLcps1eCfxatwqmpGoNUuQkve7TYl5zaGy6tzJOZGQ9ulnHf74atf/erV1dU+74mIOTAzYUWGzHx6esrMd+7cWSxbd1e/2RUAfvPti7Zt14tl1zUxttyeMCARpHHcbG6WsW0WzfX15Vhyu+h2u8HJq00TkhCOxcbxwYMHL736/q/8qy+/+fbjl156CTmaQtssXUQH9KQi2yIppZxk1/ewH8/O7m63291me//F7uWX7p+sljLuNfW7/T6lpEWaJpTVatG2BtI0MTac+tTLkNOAiKnwdiellBIepHFkRjvhrolS1ARMdci7knviMpZ0dfN2jFER2q776md/+f4yvPI93xOQIA9d1/Ypx3apR3v08X5/IDtOBwjU4MVhk02VemAwZFJNWvL/6N//9//Dv/0f5Jz/wT/4Bz/5V/4K9CMEtMjQ+LevZAxIuF6/cOf0BYAPwyFND33f73Zvv/32frO9vr6+vr6+uLj4xltvP3r0aLvdvnH+KA/jMAwgEpsunAQ+kNAqRRuOMjLbcevrjZldn8mhCEHtrwAwImRyx0wqfZ4+uDIYOJmOrG2jFGobIIiRVEthAkKX43QWuGVVFSuaNSMzm7pvXV3UlUgg2az6DQJABYjT+kW85U8IAJPvn5du/FpAAU5Wa5HsJ2vOedztck7/9J/+04s34na7/emf/unlevWbn/vllNILLz1A1Xt3QtdKDMLYE8RusT45WapC6Z8sFosYFyGEZtE1TdN1zWq1Ck1smma9Xq/X69i2fvHMnPqrMLESQFFESsq+VHMWlw4KTYwxmlkxzduuFB1GGIYkxQzR8R5QzQRt9/vdftv3/Tj2IoJMOeftps8JYoSubV988cX3vfd9d+7cWbTtYrHggKqax2EcR6en5yTDMPZDVgU1BmhCy/dX7eXV9uHVk7bZbfohhBjbrmkaAQsYShYA6rolAuckbbtAZubQxA4I1YxDVFUzYW79OAAjQFMBEfUGTjM0y4CsAqYoxTKKece5iEJlgyCwgxMyBPDYS+ddTtWVHWYgoWgAKkTB0xI+J8gpYVhl5auTis8iAwNRNRFjQjMUAQQtgsNQ+n5UIBWTsTRNBHRVp9qM6nmTnGUcxzLmEsI4DDlLKZpzVgmAqpYNi2tiAdAsdeMnXykSET35qKqmBVRCCJIzAICa10ubpgFDr99TzWkwIPtP5v5OE9fg+GyGukz87JzyvIpkniuZdq2K0/Bo08LbmJBv1Qnr9vZUpcX7qGbC23EzIVQ2rz/lWDYGDEAmNUV4pvPwm4d53+rzn49/U8ah4RVhyuZ4594UziEREkABIlQwF7JyKW1VJ7QDMk7Ek1mNDQIBIHk9kJySwGBuFEqTT+BxtPksRMQp1UIIhIBHQjKAtcdkOt2+LbPN5+P5+KZHzeLhoc2ewCUhnqkcVsRYMeFRedB1F6dGXM+bggoAYBFQrWL1gBDUlECNCPxM8he4pT52NGYcJCI4rWQ7sisLWZKvE7Mqo01ESigyJafBACo9CAA4RoADrpvAJXqZyCOMop5jRcOg2IoYugcYTUI3Kuv12hCLp6IVcxIqAgDbb1y5KV/TNGYmxbqmWa1Wfd9XKb8YmTlg9M6f5syYOXKtyEVGRCSAid4ZCEwACGuHiFEtKaiqFB1l9I0pGs6AsDZDAhliE1syCFE5YGTK5T6qLLrG9Lu2242WJDmN4/ieV18updy5d3eUYrVlUUpxp3UQha++9lYIIX3payenK2aOkUWkbduf//lfXCwWZyenp6enJ6vFou3atg2MIrJcLmQvut2E0MUYhyxZY7RgagwxcJvU9tv9zfbri9Udbhdf/K0vt92q67q+H7vOdttxvV4juXGYiZhpENW+Tymlm5s9EDmz9yMf+nCMcXNzZVpkzCBiImOf8jhssAKSSBwCNU1T8rjf70Vy5SJKGfqEJqQmXQQVE0WEyCGNw+b6hgIVKVkFEVPOw/j2P/tHP/Pj4+7lT3wSsIGcYmgAptrUFDHRBOZv9w9M072GQV45nLtrwMyatpWUf+RTP/pDP/zDX/jMr3/mM5/5B/+P/+rf/qt/FcqITRBnXyEFZ3ZTACkAFZwhM4QAy9VisfzQiw/qyvQ3HYb9djsMw2bcXV1dPXr48PHjxy55+uTR45ubm2G3TykNw5DGsZSDRH5YRIcrqJ6nNwISkRijeDSsIk
WKFq/gtWEZEBVEzBAkECCiokreLztK43bRdl2Lo2YmCcQFnGpsxFBnP7AijeMIhpWgZnQwOp0yuPMVeoLWDmpv9Rf+/9m3IWQkCkghhEAR0Z48eRIC7UAXiyYGcvD2xS9+8b1nDzb73fn526vV+9/7nhcfPX589eRhCM3ZnXt//Hs+Fv9EgyHud2mxWN2/9wIznyzH5XJZTT4IiahddOvV6fV2Y1a1IwEg5zykpJoAu5Ql7dIwDOO+H4Yhj8XVp1JKWQoih6apgFBluKRxHEVht+tLKURh1w8icr3ZeqHMqFIeiAggXG2vTk8a5sXJ3e7ll166f++Fs7OzF+/fV1Ui3A15t7/Z7XZlHLwyDAyllHHMY04qAMBZTMT6YZ9TMYxZMYuMYmY2FCmYmE3VAocmdiq5pBRYEdgQY9sgsqICccl5whKI6ErERGSIagimoGJFCrNnD0kBsoivaDNTK2ZIBkSGbiAJ5hVAqOQvRQRiNJ3PCRcomxdZ3ScPUmWIJl6zwqMoEaaUhAKwAWXJgRAAc5Khz2lUCihQhpQjHwQhavnOUEQlSUqFm2LW+uFU3SlNzVwvt+Y1jtOZPkU5ICKqFi0ikg00xjDkvVrOeQRQT4KICJNHp1VQBxAZGIARJ+kXNK/HeSkdEV1fG9Ud0zz9hEigpmSszyiFWtXKx6nm5uaZeBun1YIIPD1uVRenj3hoc6psu0qGe6pjEN6JCwqHrxPnS4LbrhVPO48/H8/H7XFrUine+renGtnNZl09GFSfajc1mmQADn8GnoSyOQU51bcnFZkjniccVfVp+tcxGiREOionHrONChzFwe8+nvNFn4/v1KjAD2dW6KQ7isCITku5TVGenegB3NLCRFQziJKLBJjMyRcg760FXwYoWiRzneGVC+b5FQAopThl1P921gvw48D/eYzmQkoDESGymxcDIFlgpelJbhZsZeqjcPKVk1gmBQ1ExL7PUGU1ankTkQCoFJ2kchC9d1hERG7G3l8/ICEi6VygUAAKSJZRFUqRPg3bUcwMtALZWh/wkOIt17CpggqM4J2Ei8UiBm5DbJoQY2xCjDESg+BmrtrhVG4ygEW3QBFwIhmiVbNss9HtCgoWbCObiFq2QQJhCEEJdrsNE7z3A+9NaYwxCkZVleLMQHMhBDPb7XpEfPLkSdvGYRhSHsw4WEiZJed+d/Xk8QYRyb8808Vi8alP/cjNzfVvfvHzq/W66zrR/NJLLz253O73+7aLp6v1YtGplWG/e+mll1bru08ePd7uM4XF0Gcw3u/3uVhO/RxdIRNRCAwFLecMIKjwxtdef+HuvaaNu81NSZkxTwEZiuZi5vn1pNliBEDCpm3cDxrNbDfurPQl5+u06UOk6kNkFElVkwA3kUPHsRHV3XYvu0erszu/9PP/3x8s6dVP/ihADBQ2275dL49ZINNK0iOy1q0FZ9OvD2sKgAC3/f50sQKQv/bX/tp/9Jn/+Xvf//5f+IVf/PH/1p87fe8rhpABs5VIQQG0FDbApvbQOoeaZ9tAACjFigAAxgjLbrnolgD3IL/fG6uYwcByvry83F7ffOMb30jj6Kqnjx8+evjw4fn5+Xa7TfvdOI7DMFgRn1BOS3a5QJ/DjgzdNoNrEF5UiqKCmVhRk5/8yT/znlde+vVf/Uwaeslpe2XjsAeQk8VpKSUlV24qgIwkBBRIDTkSI5CCOcvATBGbGQrOYjNmpjQnbQ3wSEQOFe0QiJuih9Jdu1gsO8m9Sz52i/b09BS07Da79736an+ze/0rX/sT3/PH2+9vHz8+x8AAhBSb2KWkV9cbIu7aZSlKcqmj7fu+HwdVHXLKOSOFpmuHYejHnMUbgNOQk4hoab2yl/qUUpJcsfd+33sl3BCYoiebReS0eyHnbIr7oTfDtumGXFRhuXqhfvCqmwWKICLrO9YtlwBEHIzXu0QXX3v0xS+9QURgWkpJQ59z9lYxMxlT5WOLqqqpairq5bvFcr1er4m4USlZSykp5yJpuWxiaNx3HgGYAxEm0VIUORihqGWRMadALEVUIIuqq6ooiKEZgqG4Pwc4PQRUVEUdCXug7/JGAIgzmXn6uiv5BICYar1YfbcnRCMNpRSYBGAnSoWn8yoEcpDqt44Qi4IUs4joFGVEUxxz3u+HzWaLYUkRcs5zqzAYuPMQK5FSKToOGbkqpsKB+oITGeAoMeRTEQAAmKlpmkC9gQAUtSKSOXKMjGhFkpmUklSLgTZNtFyLq15/MDBwMvG8tdQjD73ZA8kDSvAbOSWsnhpPIbFZh8aeqgc+o/sy9xw+S92c/egrEn7qLZ9SeZmAX2XDP0scfT6ej29rHCa2vstUQlezQAIjT6nj1LsOiAhMPIleHL9sZX3PDhPeJ+WlhNk94oAGdeoqvP041bK6L1BggxkZgn1TUBCeo8Hn4zs17MCrOkA+m6QQAQBgVgvziftOlFHAUopKQTVA7yny5yIQgtHckQuTuKbbksEkHgMALrNYiswrwE9zb2Uiqpa2/vxDAUO1OI27NgWrmyvMFXZfhf4HamZqNzOgrGvexbsJAUUNnB6AEx8pLAPcqiVCUHInP4CpL0y8kiZmVjIwsxDl5B/VzKxPY9d14CKKOSMWmz75enkfyWjwW1EQ1CuEZAMHjByYMUxu3USUcBNj7LrOGZU4qb+sViucJEYDN44YJ2mQgDKZyDFatpTSoDkG2m6uS0nNqhMp+34LPWBcz3wGJI7cONhen9wzs3v3H7Rt0/f9MO6vrq5Wq9UHP/C9Hu+WcSg5j+OQ+iGXEQCGUYdRL673b37jXFWR8cnlDgBubm5iwzHyslsQQRPo/OLm+upyux1e//rbJydbMztZiYiEkE2A2pBV9vu9qnZdR0SqoAqr1YoIrq+uwGwcekRctt1mt5nvA1EgorZljKEUkyJugR1jFxsupfR9v2zLogklW0lZdUBjUc1ZIHNsOpAIpTNqNUVDprC4v1QCAbRf+ic//30JPvypnxh3u5P1WZlipUPAZQpmc/76mX0d4enkNgJit1jkkiPg93/605/+9Kc//Sd//GPf+8d+9dd+7UdfuIPLTsB09p8mZKQ+pxACIUFgBgYABVNvUkWiNk6VZZOSSylx0aiaqhBYYMY23nv5wb2XHrzvQ9+FSFV3O5dhv99sNsMwvPmVr2y32/Pz88sn5xfn548ePXr88NH19TUillLcA6OouCy3sIIVEVGzbBndzF5zzvmv/3t/9cf/5I/+X/7z/+y1r345Dft+t7+5vkr9sO1LzjIMQz+kcRzHMQ0plaLEEcFMpBgispPSEUnk0FVsR+3GkVnA5t3BXTUAnH3jjhqoRqCaVBGg73tiDASllNOT1Z07Z6enpx/6rve/cEc/9P6PLdvlo7e/8eTyEpG32+2YMiIPqex3abPtxzGrQk5lGBIMvar2fZ+kBI5FZUjFzAwhZckqqlC0NvkholpT+zOlql9GikQ0DAjWGLVEZCEgEyIy0W4cRIQoFEBiprYNFMwwqYMfMjRmDk1jZCTSLJallL4fVYf9+Jg5jOMoKdcJCYBoZCCaaxNyW
PliB7d5UFeEJkOQIQn2MUZu2tBiKaVst6vuhIjBUIlMlJk5EGMpJacsCoAUVIuIJCkhBBHvSAUppmrFC1yK5u4mRkCoCiK1C9RMVD35BsRgSqWIapGDVwEYVE8gmGhaqup1QrzVVFATQ+7yZ0c9hBNhrK47Ncg558zWBST0IE9Eht5SCrtdv1i1baCcRQ0YNYuQQXEV3xCQLafS96OAmYHbirJlcOIMqTuhzL27VhUMi5nb05uZogoHJPI7YOSi95o4oIGKZJHMwWk8hFhzlmBghiIS5q2kmj+ZNzBNn3cur9kBs6FWIwqAd7B5eAdNTp08Bp+q7D31555CNseEtY5t04l8e0wkIZyKfhMUPKr/w/PxfPwuxjuRz+gZEOVaF5UdeiRcwUDoAfLtPwfwkkWVrTpO9zCgP/2oNlirhe84KrN0MpY47uOF385D/Pl4Pn7PBvocttoubtWj6CAZetj/0Y2XZyt5qIDtKIXiRBEzQ1BQlVIAjLG+OE6aoiYqotVt0AycGjq9yOEAre9U/ScOj5gFYiT2oggCkPcfwoQmEb2nBMA7CVFz8cpeDUdmy7sYYy4Vv806/qUUE559CKGWCTkwixYi4hBijP553G47UutIMqUkJc+Z6ZwzIBBWq24zIwYz2++kXowHR8iBiIh3wy4S56BMBJBpKlJKU6losw6qiEip1+xyON524oCwRVwul+5ufHKyWi0aMDHIXYxmcnl5fnZ2sl6vchlVixsAeAZaa9AlKmIGzJxSWa/XImoGTezSWNYrXnQnTZRFp4zGzIwgIkXSfr+/2Q6rk7Mf+9SfenJxXkpCxJvdVlWXJ7ZYLPrt9vL6ZnN91XXNV7/6VWZum/jZz3/BRKsLCNJisTg7XS/XKzO7ublBxPv375+dnTkhtpSiRU5OTpbL5bjfp5RyzqerO9X5MAOyYUBUBKGATYwRAFIeRUQzEsQ2YGh3IQSTMO4h5wKKacRaQ86mRVehMW0uLociRjG8dBofX15AOxRc/Oz/+x/80E5++M//dL/PzTIeIk1zNOhL4XiRHf1sBgA6c6nrc51CFqxPiPi3/tbf+j/9H/6PV9vdz/2Lf/axT/6JZbwf4tITMgWEVCEwxTh7HMmctGFynQ+r2A+RiJuGm0ZQ2U3ZwYrXa1xkJTCogQogQuDu7LQ7OwWz933wg1UYChBEZLu9PL+4ublx18S33377zTfffPjw4fnlhZMet5uLoqggEdiQgAFVRfM//Ed//8UXzn7xl37ha699+Wy5vn/vTtcQY2yWa0Rkiv6d5qIp5TFLzrLvx5ubzc3NdrsfkrdUGYZq71EjRQJwjWQiUjtqUzbH1x6YGhISkPvj+ffQNA0AENEwZJdCefyNhx/7yIc215f/r//nz5RSmra9urrabDYUw8XFlSgghCGXNKoZFMGSlYhiYiLKUqQYBjazIoaBiygyMUcgEoulFAEjIqNJtACAiAkIkAFCCF2NQuoToOIaHgJzCFERiEJsgxKqQogtTlzZECg0ERFF8s1+V0pBBUTeDUXLSERtXLhwC5owB3AOBCsTZWmL1kRJ5QYjAGFseMzFcIygXdchERgYGXPYbLYIfHJyN7ahlKyazAqRb+NERGqogKZu90iq4OFXFbecQjHCgISG5PYQ/nVIKaJARCE4fq7+N0TBjrDBfACY4pxTRPN+Osa6w7MZ+D7ve7Kqeqe3F0XNqkclgKmULFFV4SCeZCK5qKaUVJU5FhnVVFEsSySuj4cADF71FRAzGMcx5xyxOCJjApxsQt1+loBLKUYyKTRJLqNHoszojvclDUgsIu5QLyL+slSHAUVAQuD5MISpVX4+lWsJ4tYGZDbVS58ZT9EvdfIb9PriO2t7Hr3UrdesFZUDrtNK6EU7ep134IhOV6rfdLvUu7/I8/F8/E7Dt2JHgwCGR8PMQAVhypPWP7iF0HyPrsEheKsyAbDMzroAcEi7+LhNIq3/wNu/ndua/FffJCx8vhaej+/k8CMYAI4PbpwksieyND7bvs4hEJoVQQNHRioKAKRaci6lAJgRc0Ai8p55Py+cpwRwOE+OGDcH7Jdzds01f84BECotSxEAYCZiQgbnTIIxT20kdhRhufi+xwrFU/mIiNj3MvuwmUEp/ofRRhM0RaEp/Zytxi6gRu5378rvZqq6WrH/tmsWvFijN4ccAhGrKWJPEqvaCeectXY6oUhJkkFNWAYAEEBF4qqoQQYoHQCAAhT/YhgxAIAUQ0FIBdE/IKlqKaUJRAgNE0GBkpYNr7sGQe+enj148CDp6u0n5WbMzaLtVu/JfV4phtAGRnJHbySM7f+fvT+P1iXL7gKx3977RMT3fXd6L/NlVmZlSaUSIAkJoQEECA0tBAipQWDT0G5sEDQ9iG4Qg5fbba/lpnvRbbFML2SW7abbggXNYLAXlhCT3FoFViNAlEAlqVSaqlRVqpKqcn7DvfcbIuKcvbf/2Cfii3vfy5LADK3KPCvXy3vjxhdfxIkz7N/ev/3bKbWHPvNZ6gmH3e583aIcvOztQNd0p1kRpXIovasn6YDkhbqT0zbpauXOdke6dvXc7iBPyQn7g7BNx6GMJdfIqto4jqZ6eflw3TW5lPsvPWiappRLQ7ter9frjtjNykdfvNycrTebzWE8rNdrcydJzz777NPPPOMt7fsDbUegEVmJEADK1gAbsr7f52GUxOtuVUnNqv2wb7bd+fm5Wh5GAXuzTrr2q/uvARj6h6WUk3y12WyGYej3BxG5z/dGHXE4NO3J6Zh/5N3/r9Xulc//jb8FW6A7gQKp8dRk4hFg8HoiXFXDczHVcCSKxKwCgQSsWjxRm+Tpz/2F8tTZ//m/+7+cbM6+7S/+ld//n/xvbLf3xNx2SmJJDtD1ZMPd3F6mXYFpCUsN4Nm7CQjLvH04npyMY5IRmhwgAsvJ3Xtvu3vP+TMBOMPchmF3vX/48OHl5eVht//oiy9+4sWfuXzw8B/+/f/xox/+0N3Nae57Wun3fPs/+N6/9Y/IPOfxNd9+rLkfM6JJY9N0Xbtq2zaKZ56fdOm0PT3fZBuKPa1u6nQYxkcPry8fXRPJ9fX19fVuHMdxyOOowm23WsGtpUaBIas7te2KQCXbmGzyDZVjBMktszq4ZEizfng1XF9nAr7t2/7eO5+5+7bnnyMSLvz+H3+x7/PrD1/ruo6lIWKibiYNxtWGNK2WTV28lLS4cdRFNI+aE3XiozBkWmTUSmbmltqmAXWkNhJR29a6gnUZlrsAsjklY+JCjAQrhTiIExEJ51zGnLOZ+Vig2diZGFRUcnYdMphhEjHJEEkmLWZlGKeMb9R4G9ydshM4OeugbCguTZcabp66uDPubb1OQY4deCBkwdjwKJ4b37EdUhkBykNOqdtuSytt32czTynBfBgOTlifnUTxiWJQtdFM2hUzjyUPmojInDxb1kxElKRtUs6ZCGly23tV4PQYw2Y2RoF4UyclEk7ipnOpFHcqMIWQTq53IhK4m7uSq7lBXEkLCbddHgctulmtX77aNxcXJ8O4wmocx/UKfb8TpjEbJ3Z2L7uNn7g7
moppIgIJUNyDc7XmalWjXAspwvQgdXYtDvVmEHhzd82sWFmWJbAH+/0+tBCbGGNdeYgosbAlYWzHvNsOXqY4yXa383fvxjw+uL7YbHYGOk56nJdlLjZuhmEEME3Hu/0NtIiIjHSzLySBqEzsCrPf+NabX/nKVz7zqde/8Ru/WbQMm4u9qjmnzWZRT324nF7UjFCLCtyfMyuD/sydYNQosjekGQV4unmANSsVDyLXTJHh9ATXjijO+zPnfM91iRdrOl00/zYmmJ5mdvtiHQhMxOrqMHKDgskAvP3u+0+fP98O291ulx3HZV6sl8BF6L/Ua2NSN60FbnBq6RWzWFXbr3Xb33w1aLuRF/+Lfuw9G9fPzLWWp32rUvpyHMXuXi1TFwfclWpBgwSVPgCYB6lEbN8ndxBBKnTmwDTH7IQxvecNrvv5n7T1xS6qzvrTiMxSFjoejxV61rZGA4h9SKJLCQ8hCW92w4OL3W6zudhtMksS4tB+BJIQM6kWDn0Cci0LKCTbgv6XCAKWWr7F7J4GnvKlSOa7PRb3vM2QPJdFGYvb4mbBXclKRCROEDMN4tBSipXiquZE0hwJQjXKoz9jfFvthYjrEqjS67qiKsvV57x22le+OYdnC3KGMwuhSuSJCPWFmCjqvuI5RtkhyAJJTBGo0ro88or9a+0r3ntnTdRpZusK8/VXuhtWaQ3MizqRjOPlbvtwu7kyTXd7e/vdG/WBacjD4L7dbR8xsxlKqR3EzGpuhpV/GoWfsURxKSVnIj5pPIgIUtK5om5i94kq3L43uVkxHTRELuS0bZzfBoMhab2D1B46V0GM2HhIBPaVs56GCO2qiOhD+XPbRPOoiq5KxGZGqCCLNcdvpD1P+vEGBFOak7uTtNK+WFaph+UIBI7MqtdRUb8S2GNu4837yANADUDeJRC9Ypi5lo+vUn8UndFYT1+80xfb91OFn9jGBHEsH5nBO2FB22RafagKGQgAAQAASURBVPcFthg6GzC/S+nBF1u/JhbANUp6fTkYirtO0zRuJpvTdLSL3SOkC11IUnKDSF3udV7KdFyOdz4fWJfpuEfs6VpNUDLhCBC5l1JMJ1UNrWx2zMu8lKkWV9uRXJmckpWi5EtPIRFpSkNKKV3li4uNbEckAhRaSpkqyx0LQaBAKTAGZ0jCcoAaPKE4QLbM8xL0e+wEDQQShWRrRJ+ZIMwCsIdg6mnKv/iw7Hf9Cf7nbN6MRD+7E3MYw0I6klCIjEidFMtM5u5Fi7opaYGphX6Uubn2naaGFFuJU0UGqUX81Zqi4BL0HHLupjWL2hsYh84zhGgbUDDwv2inrb2zeBEbXJLKPcSBf2uhVg8NeGakGgQyD5GlGiE8RQuFOOLTcXYBpRqbrhAXIjLjCJz3LZmZgwkgbn5ZnIhSSiRjv9BqU9sJWsa1XHLptDHhy3VXs/da95jjVzoJzUBU7605BiIS5lvdbR3DIJe7i+c3T7/71rvElwSBsFDMAAqZBvQcV03aeMiJmZmj5m46DC8uwLyQGRFtt9vg5QNQvGTPp2v2ap2N48jkOfM4jtYi+rvd7nOfHpkTOHHKOW2c5W5/vL29U8N77713ONwlkQeXD1QXs1IWJpm2w0CwIWU2gpXvvPVbtzdfev1Tr7719nenm8LMaKWo1PSm65hHxezVXg134xwideqBhhW8P6POh2nUfAbgxJqYIVA5sppT6UTkTMTBHHkGSMYKHu3n8Q8/izg0T5UAOo1Y4MwnJM5Eod+RoKG1aU48JJ7L4oRh8FnLfr+/2+9THqcyh+XWSfbbSS0qGe9Nv8QSiK2Vc+fuLu2Ldr5gmmst2wsnqDG4WzkVF531ZzcUKwqU2CMHdYpWnvzIyL/WFyssovOLS0a9EV+9XrXVjb/wldWD6A+oPvTzA85uxL2hax2GPjD6AGNmFlRhxt3mcrvbbobNkLbjOA5pzEJubAYoyBJxFhGhw6IMiFApZO7mGohfgAgSCDsDA+YkzD6SOpNb1oFGHmV7cXsoHzy9QUqHBepZmFzIuaiVYlbu7qJ62xpBK3NUetToRjw1rxGl6hBWi4dOU6bmexrHI51goqEBAzqtOeHbl+qDxBoIjx0h5ZP+qrc+DbxlQ5am5qoQMxebaRXCW0+0dTIwXvSiwf4THRlLp4QktZ92VZ3KktK4G8ecN2qplFKMiqZhuCQe5wnLIlrM98chH1UHUxiFRXJ2MYEUPSFC2waBxXTg1CidIy1JmspkrTbZrLEKk1nOmYTViqqaqoESUZKkqubOgBEBxuj930Z1XXnqXnPa2HHKoMLBSWo0p+6bq4AOahDYVpMCNWdd50CfoeZKLv0S6nhZAyvce6AnSkOpstJxu+Yeamqz2+uQa5m9LiT4Udk8dmhTpe9rCK11C5vfiPY6Brx+mNjAKTh4tkp/P1X4L0jjjxhFrfmLj/jcD6yH/W66EAY7kxY8jWxjUpADi+rBfWGhnDWl/WG/3N4exk1KSA4GJaqFAAQFqflcQjfCdR7SYBbExw1YoTB3VYcWtaWUxXwGjMmNiKkIlcSJeUk0K5agDhZ201lLkZSYhFwTYzOkzeWQNxtkAQDVSOullPIwgBKc4QZ1zAYuYCw6LbNmJFcaSOZlWUwRqqZtMU8pQcQrDTUzJ276rv/CN18tcVbzAApSIgc0aghBChTiQu4IuWAz0+LLHMyxVlQSN7nuiv0IBEqgmYAabQshOTQaGHVzaJD/U0PAma2rCsNUMK+u5skqjrZmWMTKMHix1iP2o5qWqYfTWV1W/73emJncAtkUdDJ9fc8tztp/NZqtJOPCSQurYqWMXGlziCjnnGRA9wbJjIpzdveoSGFm81IKcotSk+Ref+mtFpNaFtVWZDMxMQTiFPmGM0+Dia+vr3WehoEfP76+vty9/dZ3BRDJCIAmMZGom5qnBkatrAbBFRMGCjccheNeb5hZaowXwzAsyxKZxtlnLjH3KouVusOMYDCMQ460ctQQ5rR84Ytf+qEf+j2f/uznH7/0igzj0w9u3vruO7/yK7/y5m/+xptvvvnuu2/f3d4wfHEtpQzDZjOk5XAQAcFczdS++c1v/uAP/uCTJ0/283eL6iBpAUdfsffg95lDiEByhlfTknj1Ca5drdX4iUjDejiujFpHy2iH+Epg3dYMH4j3i/XH1518rFJ/wGnlwrmjEiek1VOwfjnUricCdc4gA1PkoQAvZmWZweSEaZlvbm4Ph0NKCY3Tonn7auauDndiakDoMAGj32pGjmp1UuTuWm7yLO1Tb6xXYniDDgaKsMvuxffqMVZe8OUiIRwKMdyeZH04BXdnXVBfmnekyjrI5HxmE69/Y1V7vDbyqBHxrxeB6l2c1yZFW6ebHG2PbFw7FAjaKBpMKWV2oe0wXl1fPLi63gwplmCGWTFxB3kiSI29zUWdOIPcNNQaDARvCRgQgVKtR7XA83LybMoD8jZDMS7zMB9pmkZbssKVoQ4yVS+lLKpLXg7uFVjRHQN2gGO9ZgAGpqjAhlBNm9ddYdUfRh6k5WCSqAl0d3VthlRFWNWq7DpIiElS5PqoTqL
1YOrOXc65s4z2qQ2Ag7KTAzdBZiuylgCw16FychvXz26d6l8PEmoBS6ZkinlepmV0w83d9A9+8dfI7fGjizd/4xtvv/3WMFzf7aeb59Ptc5dh3O8PED4ep7rmaMcFmFW5rfiVuqozkRD3y4hCADMTEdf6ONZ1+RGX7GPSum9FK+VV9xr1Ayy4HUI+oqnLMkCnUszTrufu0ucRek/DT8tFa1TXiig61RVvsFmFeRtOy90pTi+GvkQ7wOh8s+FO2oqWJgheQHbiFI2H0z1QKDkbOfX1pLpnK2agOvasOXt9YsvqGK22Uy1iZK+sgy9wWLUue3Ep+H77ZLUTWVEtiY53+RR2PLVWffo7AkTPvMHfFc/QVlmtnuQ0mIINUOg8H28BvbgcMQzAcS7Pb+6eP3r0EnDBSECvy+DIp6WUkJKxkIFpUFvKglJsmbWUUjMWEkpJwU/FxIHa9oEWYQ1J1UKz+uIOeEnVbE7bPI7jkFLabTYXV1e4GjCOyAkoKAoRSSJEOk1sCxn5bDYbFiNVYFk25gQlsJOZd11cgIQzETlTSgnCDb4uxEyUzso+2073L17rqc+IT8doJ7hZYQq86AJf3BezAldaSqX+XmbThcOmcRUFuZPVbaiq0bvn1HIbkfnohlC1lojq+h4MLryuu+kwnJUn6GvDmIhyPiXYsNqAeq1HfNpX44S2exHAICYmqiWFcIeFSUghqivEaxBgb+yglNYUc97i+suyxFZ3co5fMC67t9lhS23TPKFgRWQYBmYuejqDqkoeuz8ZGM54PQzD8ViV3+IyatpN2eCqxU3XfXR3e7t9/HCaJvEqq/Xmm79hBciVvxUUdO0o7rSq+KzP56SpUCtK3Wpw4eS3kCdGZh6GgZlKmSMM013llLIQS2ftsyJCwzBs8ub6weVrr732uc997k/9yX/14eufweUVnDAbht3F6/TpH/Lf9yf+RHn//X/wD/7BX/vrf+UXfuFv394+320vRWS+ncPlZnYBSim77ebb3/72j/zIjzx69Oi7771/2B+G7cbBR9U0ZJQPQQZG/8dAr7wKZyMM6zHazWJyhPvULTBUo6pyRVRnoCZYQETUygi7IiLpqXCUVm09SPo765FZX/Dp/RMECxVQ5YCpR3FhrcmpySIQScTwSDixdD+2Zee1TUNqeWwLhUw0qzEuwFBOAz7kwLyzP93v5Pa6T/LoKOHg14ylqR0Z1PbObm69qwE0UG7iqP8LW7AakLFhRbM66eNH2bmpVtcWiYX21HDe+mLUoXq+Cq+slyQ6nYHohYnfa0FjCYgXiU9Q0j78cs4p88tPXmLBmIecQ5fPiSiRDMJaJrNSYG6cOIiIjMYBaDV+PcsdPSvM7gZ3NTINZU4YE5IgsdPtQZ8f7m6OMBsXZWNRh5s6iYNUSZUyTmtad0tMEYXWhKoJaXB2cVTJqFM8IgCHZIA4KSBBVEtUg0znhNEcDxqtSJWIpQE+UatKV95/Q2oQuYhEvC8cmT53+jO61/pzXD+saD033sV+0FKF/aH34SGShFiX6d133pvubuHzy493zPLapz79zjvvjOPzz3/uXxnGi29987fe/M3v/Pqvf9154pwCPeFG7h7g8L7atFEU85FyjjhhxYlEDWcpnmQwP61FaGGjw+GwLAsJ55yRs4Syl+kwDLEf39va6r2f82NhtbPeW45i3PYdjdq479dvq7IQYq6aFu7kLMJ+Vm9/uuEYtXVeQitEvD++yulPXt3UKmB4QnN0n7BGouJ6tK+P/Uh+IYMX6ZG2UCicqzB9VJfUazt9w09x6+/V/Ps+4b8oze/5hL9D+2jP4Xc1N7hq4ROuE54G8oCEWzne3j5zm0WuN1cEHOfp9m7/dF5uh+1Las4w9061J5RSzps8bEo5OBU1IYYI3Bg5MQ+xvJkVUHFnYA7TyaNaWWdXJUogJqhw8LkygAITwmbIF7ud5Ly7vMSDByh3oBlaoNO8HM09AHu2mJthcTFOnogzzE2VmVVcSMgJBrNIaSGROBNYGnFkZfMiIubU4Q+t/U7J4U94q7lBuFMwxwbLphEKbHFbzGe3YlZonoiITE21iU8wVUFaMHPIz5ZThthaojgyB9VVC9pFdPw9VSY6aTJFHSZqjZ8fDRfZd5B7d+Gr1usp1ls2EaXG8mhg14LZVESYxUHurkwl3hGUosdpEZHKmr7iIGXm4/FYlkqAY8Am5Uox55k916IqLQQyBaAiQomgKLYQUxrF3WedVEYhZjcmZ7JynMh1t9moluScKDHD3Y5TcXfidFf04cOHT997Nwmurrf7uxshk0Qiy9VlWtSZhSXPc16KOafNbmNmx+Meisvtxt3neQYsXbHxdPXkUkBy/fA333/nMD70wecF4/aqmLr7IHlaFjFnhoREMoGZFK5QZ5dERHtVL8ZOzDRYFHSZMRg6Fdg83co2b166em+6OwxQfgJSEWUq5DN0FqYhZVbdXlwPwzhPtLl8IBeP/+h//b/zkz/5k/zo0+ogghFKUhFiYnMTInrl0U/9zB//yp/6o//H/8Of/2v/0V/RUm7ee+9Tw82777+1HbY8bJ7dTtePX3nvg3c3mb717Td/30/93vnug1974xu2fy6yE5MhpWMw5Yeqh4sQk0hiaaqo6oRK/U8mIkzE1MgG1dws7ESrCFKvDAWtzWIk1cjQGEiMPERRLIKUR+HS4E6owGC4u2lcFjNzHlKUaxHBvJhVtFhp+mkNJNUcs85IxM0/ifMnM4CpFveSB00gnMGWvcjhdiFf5mMxpSTJm7sXuGGEDiWchM0dHsqcVEowx1Jpt24rNguqgY8zq4vq/VvEZRDJS6rHa4lM4CogBHd3VjmhrfwMJVjTQ1XQoR1i0hP1qKTGxi3WH+tOW0rUXInEzNxqij4cgFJKaYtAqBr2nDIFVLXFg1qsCW6GlbuyWmsdZ6Yk3H1xIsnqro6c83a7vbi42Gw2OeftMo3jKIncFtIyZGK4luO8KKwQEScRilJcYk6lLCQCYvOk5g4hNSvzOKQEJTPzUsrs7pxYUnrfL53FmPZUntNyJ8s8ugOlqHrkFStJj5gz2USZiCNbEs+ERCiRthU8nZwsJ6JpmdaPXZwgwixF5+olgt0r9sQdwuNp2YaBnMhq+je4PwUlMpxERDQM3YtTcjhq/R2a4qKQ0yn1hzyMaNgKuDHB3bTYvX2iw0FLmZmQcgJQyszMTCi2SErbzUbLBC9MnISHnA+HQ06J2cAKntUWLcfd1cMf/JEf/uN/4o8a2e3fPnzmi59//4MPxiv57Bdf+uZv/kpOW2Yu03Q4HHMeXXG730sezCwnkpRAUFf1Ep7PcR4NdnHpqj4fp13ebmR8Nk2EeS5TEprn48WDDb9/W2jGxrYPRhkd0Hm5G+ZMnMhKIqZigZYBJbh5MWRGyjxrFH+4xVSMwewEJFR/irw9OiKFdg8x8psQDo0ZVIJUsiD/DKwYUUgPOVBc3UBRA8qMqjXiqGlyAuClTqhaIWxeUKWDw3gQhNZbq0T1hGoKqDdAu4ecIAmRETyd5q8zDX7CSWmUDjrEoX6KHzFR5IIIKH
HrNSVYRVMY6DkEbulcAsDUi07rXbQV4eQM2OodxqlI9d5hH/3O99s/s3avCCwwJFFDRUCtGaLcJMhg9KGOgUVyAy5whkuwkgBAJyZtEQkg6krPo5MfdjH/RTTRFFuWh8Q4AIDAWDAI4Lfsy6vzb3/7qz9/HI+br/yI/fZ7N7/5/PXHP3I5P8Z8nYdRYc4LwQhkGGn4rOw+W17eF9wsuM12l8swLDvMo6ua3QJPnZ/tD+9pmW1ym9TnO5ue6fK+Lbd7fSnq9BJopNFs73qgVIrNm3H78OHrDx5+ivKAVCB73P32QRgtZUIQosEXLiZqEBEbbLaD+cSymE/Fpic3L4Flhh8LbSRrSXKQK7kIe34Z2GC4dKTbJHeDPbsrg2AAZYBZWGFmJpUCI9n6Sd57+fFop+FJ/W9r9kpbrCoXAwAMDHfADbQwLcAEn4CZ04IygQxsKJOowb0shX02NdfAK1VlBqFkRQGYatU7AAlgRtO8NHOLAVg1vcFN6lkqHYUTUeIEnkgUBoe5h1nLUX8XBPsB24mtxt0j/+xeKQaI406dpQVYK96kIm6CCq+iH02wTgASV8pvqey28b7VEoRaXG5RQ5gyd1xKKGIxQ4QSUtiRqZRKHJ4aB503fvyVmZjcRUyXMpdpk2TMAne3ksgERQAWojEx2XFatBxef/3VZ8+ejYNc7DbT8WCmY8rTfBjHkcIQCuxlW7pCeYmIEnEXN2dykW3O2Zby+KWHKbOqbsfN4XDYXWzgvGgxM2YhETMbhgEcTIMggnhZVNyd2IFWF+fsQYGj7hJE+YmwMPNms3Fv5Z4eg8CIncAiECZmhmK/P+S0efzKE07yp3/2z/zkT30lXV8HdCh8kZwELbg7z/NmGKfDcRzS/+zf/De/+LnP/zt/7s9dXFzI/P6jlx7fHpY0DJuN3t7ellLybvfd77zlP/FjDx48SCkVYjBlEivqrRyFHcTeDUFrJToknOCLqVOQxOgJitbTdKsZ6OcSxYRTmhurtGGjvvASOfOekVgppzU7CwBKKVXqjarYm60oLmxFV9NjFgAUp4rHNg+pH1+vsH0lLrJrbXfs8TqsglVB4+mWz1/3s63fWZ/txW/1P9c/5y0jtz54fdoXr//FKNG92r/ax3zqgfVX7p2nX8z6jl68gBdv5J8iFdB/UVUjXxQCBnnITpyM4SUYNBOTcTZVoiQEhxRQ+J/MDA4ecDanomZmAnIDLVpYhZmZWHLEQBa1xXyZj/NiR9VlcXUzx+IW/CPulfEYH1YQc++u1y/ujfb4iFt1bu+ie4/1fKRVapN4HGYW8647bO0xGVGs1kQt9tErQckVdCoX9BVohBsbc/zZ4xfreApCKbSTea4UkDq9aqzxsW6klFhImKBIQ77YJRjttsNulMPdMyF7eH1xuHtejgdhjEN69ODhNFd8h6qJpKBxil0tkBrn+wWP4+h+AJBzzjnHsjCOo05RdCqccnGLK1yWcjz6NE29TkG4pqb9hPNsU9KbPiQa7HzdG3Qa1f0J3psm3kAK1FC48WnwuriHysd9Aja3YPU7M6yJyPhsYvax1N9ez7KG74rK1dNzjKsx0OnZxTIZqyVWScU4TyCsoXYq94oLk9pFHzENmgr52TWfrwyOj/ryh7cP9QW+7w1+IlpULnwSHtZ56ivmBQMyANOM5YjlcNjvv/vOu2+8+/WX3vzaHVjx8Ief/AguBySEmCCaaDtLsgIrzikJsmGz4Yxhi7zDZgsHsAddQR7t/BWYYQZm8+Vg07Myf2DL7e1xJvNkSHBRczsIT5IXiD19ti90xPWCbQItkAKxserBLpUNzpnAcB4kMSU3KiWEszKIzBOogCIqCSJNrMQLKO0uhsLg3bgkFUlAgrNWFNG6fz7ExvgEtJXp9zt6rUxRveJBOuheSBdwgS4gwNXmxczgOs/zhiOaVmNqnSo7mJzNtHpYqJuvyGlStL3+zGTyCrioZn9scG7oO3gcZmYn+JufQKQ9o1iTAlT37l6qc4/sN1kjFBURS4bCxU8p0UQn3tK2A0V9jjdTsjoMOefgVAUgdZNlESnWvYU6lppBU8wMXbGwRqORBEkIEJfEZmU5iNtmM263IzNSYpHs7ocZd6T7w/T2b7354NF13o3Pnz8jwnYYD8e7auhwigo1uMLA5MKVIDngU8zMIUXOMqYsiQr4lVde2d/eLdM8DHmajuM4ujvNNVMqIup+cgjdvHWGuzoTmTKTKczZwO7OQmRsVjhxKBheX18T0fF4FIGA3NS8iHtV4CJhSiBsLi7cnUj+8B/5Y//qf/vPYsgwVzcRbmR3cHgpJTFHPnbcbKb97ZDyf+1nf/Zit/vf/q//N3NR4nR7+wzzwpIBbLfbYRjeeeedN9988wtf+MKvvfH1m7vi5FA1FBIij5LXwAmrFTgFrsmi2MiJuCocNBuXnZwNoPa4HaBGE+kVGhdz8MzwxaoC0N2LG9mZDRHJkHq8BOrpDHkVIzJGbASrYsivx23MHI2oyrk7FxMmSo/CQI81LgIvDoTf7gQSDm6YdUaeG8qxs5uuby1WgvX0Wbt29xaCdfPzZmuyn9YztX/ozB2953KsX/sKOHfvK0Qf8sXVp/XNNYPIvZ87uTTm93rgo+7xe7feXWY2z3Pv3lnLMAwi7K7C2AyShNx1tx0T0ZBzJhYQwEKcJGk5OrmDjEWjEIxBnOeiFAxYzMzJoaWUMulB5+M076d5UjMSRVK1EpJQzSHkVsfXHYjaDytruvb26mnWIyXXldNPOD2H9jP0URShq95tax+A2FNbirnhQk96OmhlZZG5AQByDywTcIKYAuY9+IJVsGDt2PR7sRXZ5trh6Wfrf65hLeyAmhBvt9uXn1wOUj796uOXH18f7p4OKK8+urp99n5ZdCQkkU+9+tJvvvWeG+Wc42SmEJEoYejQeveqKR8/Mc82z3P84t3d3d3dXaLL8HxUlcWL6fr6V53VxpiqnHzCmLVO5jCrKhPNRayoOIottvpR51OpnrYHboI5qCMUVkOlnTbCQ60PzRzuvOJbDqwBxx3hhWgaCISIha1na9Vx5b4iVcynnxikDCBvwmmR8LF7VX9RXI32OQAKNDZeWCrOvxeht1oVHqkdq9d7fpi3TaE3rjxP/a8PbZ8E7+Jf+rbaUD4hbsMpcQRUyRZQpIgyISWAn93e7YtdPXryyqef7J48nvTyyesvg931sJI7LqAEj5pvgiembQYbFvYNdAsP9fpLEAED6ALC2CaMTKqieynPYYft5hmKRTE4oPA74BYyledvPz08m2aldHh0uTGo05ySYRKYkgNQUKmSiQ7QABpgnMFQhhKUVQV2NFeSLMTAETwRz2YRdJUkGxIBCSiBN5KU6I44gRPCGww0U9/KPv4P+czh7yubn71D3GNZoS9CVNwXwmI6u87uM6miFCFFBddUVljYmZEZBgyZ953LV7FFVe1aa+utFiuOjDWyDDViiG4Q9mP6twD4Kli9suuMiKwl/E55BT87Mp3m60cbbN3QITfq0UQiIpJepwLSlWRc3xqZqXOgRf9WT7V+ytJ4zyFM5AN5g
hI7i+g0kS8PH1x84TOf/tSnXr+63D158uThw4ckfPP87q133v7ggw/efPv5G2+8cTiUi2EgScd5LqVcXl4WQySyvSjIYCYxQestRSHhbEReFIIFVhZcX19dbLfvfPe7psWNUouXO5RrSNjCGMycgKYd0FgNiIgzZ7Cyz8VdFSZBLGilgJNZScSPHj1KKZViKdUOMTWPn3IGwZ2EeZABnJjSn/wTfwr5Yrm7TTnLJjBa1fF2QIiEhQAXglmWtByn4TL/0Z/5mW9+85t/6d/73++P0+MnL+3n+Tgt4zi6LdM0EdEv/dKv/I/++/+DL33pS//wV95w0yxcrKCSkYi7w+BqxRVASsmdO0cCzDQA5d0EISNuHqD3Z3xKpQSCcR2OWdu4tfdaNtFbxJ0hBTVqECZRnWOQiK03K9aoMpGgOaQ9Zk9GpBbFXNyzjDE1aZVwM6qAK6q2O7NDA6Ld1VnahPEXEl8fOrGp/9fA2fHl0zHe0ZWIiVCDPH3h6stB16VZ/Vy/mGhrk3Td+lXdW0TaNeNDWxjAa9/MV+nN1VpzCv/fu/31Rx/aPvRq+zu8qlSMd+4WnWwBzK0AnlNkAU2e3zFjTDkH1sJ5yDnnvOEiIu7qakxMwsTsrjF4jFydvZiqLYvOqk+P+2mZj3MpDnA2puJu5nbiRYC7C+CwMHtXJv79PDBeeIeIJBL8zZkM3Vfm7q+hS33EIGy9YidsQE+fMzGTUJWNJSJHjY80b6WR3FR+yZhTJFWLsItS+PoJ9n2r/1L/c02u29fwSAZ2adnI1dciBzUmZKar3fDyo+shLY8f7K52wmUvmF56uF3uPiBk0aMWPL68+E28F3GHlJI7DQMBbKD9fn82ts+noZmF7MeyLMfjcZM3RIOpTdMkmCOmw8w5i5y2JuoD+55DQr36zsz0NJ6pQ6/p1DNUtUSd2MPx693iq5m7zrzVzjQHoF5rRfqMEmKvZlzVUAXATY4C51xlfYyFfu86uFYXhNRZq2K+S/iEXJcjRtBqoa2iyDECK8CTzAGCMNwaTSiRwCvivbPR9FLDGHd16XC0gsN2EWehq7paOxzQtY9HK23EF9r3XcFPQFut6ufP63eAVnwMWisKoMo4GoVgBWUCKcqiKT148urVxSuPnjxcBr7Mj/nhQwwpBJ8AM5REDltghDDs1MEOTowEHyOWDw+raGvO6oOzkG+EmAUQQz7C5yl924XEOLMAE/SZ6Qfqz5eLwR8mUuaXPu8PHoBm98NC86CALbAD4Ug0EZYqoV4OYAUGcIJnFKC4aJAjGIsbFncFTZwmnxdDBmWnESTOobK4ZXbmSUQQagMdu15RqtZd6OjAf/6P7nduZ5BmW/2Pzt5vBoj7RG6Aks+OApvdFtOFaUFZnJ3YKWCSTMWhLTTp7tTX4RbKXFta9ZNzPe21d9e+t7bTPEmGl755mVVGABFBI6dYY/K6WXj6xeBT6MZqMzqoOoQraFbFqjIRUUrJoR38UwGi57bdyRtktpYaYj/r28TILEYQEZFGMUrIKatIKQtztfaEQcSshYmszIvOF2N6+dXXPv+5T3/x85+9vtoxnEiXw/vDMLz0YPuZ176cc/5gb1//xg/+/N/+hV/96tcePHopX2yfPn1KeMCMlLIWL3BXY1dmFg87EjWZaSUoLuFczETkyUsvaZlvb54R+eGw3w6jWmBwFeAgAlYtrORZwpR3CtpVg4MpEg5cmLTMi1odGFoIxhAAIvLgwaMhjzViTU7NyTQjgxvIYMJ8e3v76KVXv/zlH/zyD/2oH5a8u4IQCGYgoDkOLsQELMsixHDnYRjycPf++xePH//sz/7s3/kb/+Hf/Xt//+Hjl9JS7t56SxhlWTLZbrd755333nnnnT/0h/7Qr3/9W8/389X11c3tIc5BgWYiqkTs7kKk1Qj2KE9KVfOauxWC+vixljGo/2/bADcJ2zX2wJvQPMkJoVqnZgvfnE0bOmUz+oRhZoXbchYp90gMKtYGUyDEPPglV25iGHzkoWJuUVdtDgEhaIbNIHRvAvd/1++cXn/E9nfP4O6v+wR+8Zz9nXP/yj/0Gu75aetGq0hV+/UPcdju2Z2nZe7cw7l3/hd/7nt4g/duf/3mvY/6SXgYwexQDdlH92QEIi9Ojrt5Il9iKUspjSlv0xSJ3yHl3W4HSWykSiIDYG5e5rJE09ndnx+hBnWoUw/kGJxZOg65dXqMS+A8mtadDV5lv0+3sF4eqeFVGgFVGMx96LKDpUcZmJoZXSWBAQYxKmaDwxM8daYHyQyq40eR8+/6QBEyrKS5kaiJPcbN3Lp3hxZHrBMtYiZNax2IcgWIZGaOitNSTLWSlJKTkAvxIDwmTgLofjk+216NbIdN0rvbiSjPh8M861iZP+fwOc3AzOM4GmhZlmCPqUOxCa8HgJ9lMjMS3mw24zgmTlqqa5qZ634JENE8z/M8i4iqwgwhqwteb21RRxtuzTr56YHFYaBnU4lqNq+51lZKjd20MVwxOOgszXWQ1LGhHqWAZk1AlbhqUnnQBMFXYNHU2H1XuhfxvjTw6Wm8uXu7TKo7c/2U3F3AKzGeymjqa10sWIuuGUEYHIWI7iAS97AqPso9s5b7q64mgDX5REtUenS412NO4Q++f+bv+4GfmPaCN0hw7tWkH+tWRZ+aOACUYQSHLoHfgCTebH28fD7dTm8/LWnz8quvXeQHwFicQW7wVNN/E3QGDwDgBXoEAbQDUKvVvDqGSuy0NSQnLAAZxJlpJ7QhfEEkk4iDCQvklvCe4C7BX/ncl4HNxeYhIUc+ULHgYoYtYkf4HewWeuPLrdmxzHcMyhzUOAqPsi7At/CFJTkVFKOsMoJ8GbeDCsnAygRnGJtJMWHJIhmSgJ4WXIWzP4pW+GPTvJeF1j9p/SdCRiI+AQAlPwKAL/BCvhAWQmEyqKotDpACVoI0n2ClSR7E0tY9ww9tRHRa9ldvYmW5rW2he4dxVd0zd2dKDYwm632hV1FFMLNLXwTXJhH5CriJEKb3FomM0gh2AZNDuQFgzYuZuZZ10Uj3BolOnBVSqyhrdJGa5SEggUsVTDJ3ZmYYnNkaFkjNmZmKbi4ukJk5f+a1J1/+gS++/Og6k9689908yCZnzlnLNC0HLGPJ+eHVSz/7X/3jP/6jP/B//w/+0j/81a/OzhcXF9M0jbtdTiOTuip8gZMACSi1OhPkEBFXI3JmEsbDB1ePHz98+v67y3TIwkdXSVFS78qMCi8stRbanAIQ7IgcPYEIJhyRcmcQuUbFIlyTMJFLSqP4g8ur7WbDnMoCpkXclBCZ1BoQcHK36+trMv9XPv+F/PAhGK4ggTtUNeeWulRjEZjnlKf9ftztoKUcjhePHwN4+NJLf/rP/Dd/6dfeuLu7M9Q0glkJFfhxzD//8z//r/1r/+PXP/Xqzde+OQhIJ9hA8ADSOsdgqD/FrcwkkQhVr6zHUiIB4S0eTZA+MbD2f6yN7+7erMIY7HDmEzCsD9V12LuhxWhVBEVJmCrdzXraKGqV7mmStAm2BmF68wmBKAYPa8jZK3A0uKHshZm9dpOirZN4
vkJWrWfp93CB1myN6yPX76//5RWo+6PO2fuqTsxzgB8zt1XyRc/WA662fhzrW177h+0rZ2mxe+f8qHbvcce31umO/u9C1vm548cUJB5Bq+gHOLGZKeeZE82HIOy6kGErg8q4mKmrzYu7q+oxXISyxPkPsxMJmIzYnUJayCJIfLrfMy7loEu41+TDlnhUKH7j8AiTva3FLYdD0vk2CJQYqNyPQJV7rYVvoUpPDhg7kzkRWFaJvvh1dmAtVt9y7HVanWoIe5AFNUFX2/ohxqzpc8dbxLGXHpzmcl1tArkYJdzOpIRFcBSw6Z4UKNOk83I4Lou7c845avyIOMa8iLh5ZCBPV9h6V1XzUENIKaXtdjtuN6ypLBrg1c0m7ZcpZxg88paZ5cSH6Os5WvNap9fuiI3TAz5q7sZO1gNS1a+GgH01aImIqXJ+3uvDWL6a70ciLbDoHjiIAFFwqi4vqo3arK9RUJdK90Y1jLqf0726vLM1gVpdIdDQmy7xBFe4iUa23Lu5OZBVWIK56uiQ1V9rJSiVyNQAqrw5Mbpq3T6auXhy+cKxjM7+UJ8QwFrc4vvt49/Ot6HIFzE+NOL4sWxaYxaMphAc3E0QggPTtD8ceLh69OrnhJfdJqftqxdXT5Bech8XE2JWm4gL6TTdvE/HZTNkpAQokkEydAYSLAMDCIioD8whjtEwxBzzwFoRGGMx8YWJIDQSO4sxdga72gyMnWM8FmVOzBlakGaEFgKOsAP0hvKN6J7kxvW4lAP74lpgniRm84XqYolMJ9OJkgybpK4yupOBowDBQcEDlJgTSwqFekWDtZmdM0V8nFuPTDUusLMFJ/DzvvIMJ3ioC0autRAZQQM+52pqXpYlDEUtpS/mfcDzCV9ZwaQArAXv1sUL0fy84dwAtmAz6AC6lhZ60Txrx/eKPEfbRbr1636GDEIwpa18PFYYkTP5skTBkLuKknsjKs2D5FagElgYqrw4JuQWqthAEoqyCC8aTHsAQAaquSHXYu3i4e5RqWYuIgwfh/H6YvPpT3/68cNrLcvtzdNXnjxkMrd5no+l8GBDokVoZNt869d/6Ud/4qf+V//Lf+vP/fl/7y//v/6Ty92DqTBBRDKRGC/GSrBELARlZ6ZkwiBhtgheMZj59Vdf3o756++951qY83YcyE2E3V0CKOUWR4qQMMxInBrJhBmBiNkhoRoFgxnFfDLLkghIzMMgFxcXu+3lOGzdb0mVo/KXTtFTImLC5cXWnD/44IPn3/6OXF1pGt5/9vzu7p1SypMnTy4uLt5///3j/k5EDofDT/ze33s8Ht9+67uAPXn0OLkv+33e7f74z/zM//Mv/+U33nhjv99fXV24KekCLeo+5M03vvGtr3/96z/xe3/8u2+9M+1vRiHttnZL6tEqDMzc+G09MEOUkqyG4ApJWJ1BXo9OxNrRk3urwd+tfzKvQbo2voEqzlHPsIq7n+ZMmwlr8gbgZB71IlpvHuN6Hnqlcmj3K0zaqJ3imKZ+YW0W9Tvt82rtvfTXteb2fNKuj/yoF2vzvffG2se7d7YXm5/nCdfGer/ae7ewvgx3B2riYo0Rxbo+6kMykB9+Md/jIvuL9R2tK9bWRZtETgRKhCagEToN+/0eABMlGYjIOYRBMEh2MEQgQyE5qutSlsWmaVqCLtXUzDQ2Z3cLvn5nDwbGE0diH+R1FySiaia/4PFWcq3VfdUFc11Aa+19qjfF1cWNld6JyVosw1e7FjMInjgw7CQgCsrwU/gFqDm/cNKqv0SNM8ZbVWp34dZDYu3s1RL25uDdS8v3uSkipURtp0SqPw5WVTNh5kRRya1ClhJvtrnoUchmU5BOh8mMkqRltpwzM0c1dcjTYzWSW+8GZYkDqvCIkVW+GaCUQlZ6VwRylVNQ71DOOaVkwYtmxhZIiFV4yKqD6O4wI1VQF1+pj10CLGMWrk6kBKOImaViEABQuJQIRoLTEgGAHK7W1Q5jeghWe3PEuVYjq/KS29ncT+F1r6ePneaaE0pZ2rONUdlqOjgF1hUA3E73BjJaL2J+Uqro48vdG+ClXaCtfEIATgx4SKoaKiy5nKlTEOAcHUiV+YZOp/p++8S3kzfY4IXB3/ixzhM6gBMruq1LXPR4/K1vf3uebl9/7ZVXH30RpHDD9Ai8gV8vLpLIgWkpd8f30vIch2e6/+DZdEzi292w3WWaBmxfhjN4AAQsgUZI4AkEbBNGwigUSoYFsIR94i2GK/gAL2q3wC141smSAHlHjo0nwgBDolwou0MYTIE7VfgRfmTssTyT+dZsDz2UsldYFmLboRwxgPSOeJ95xGw2HjLL0QwJcCXy4A9mbxsi3XOlUNdk+sQIUNTLbZyiBFT8AqF5gwY4fIKp20Km8OJQsgVeInUMc1hBWQyAs6uycC//A6oSVP3FkwkBb9Zmz9StD6BKU38yh04f1ThknKR5iYru6MZh7RrQfxrwbgysI7/dqok/q1HFUewX5snK1GtuKLuaM7t7ahDBeL/qmwWksL3DqyYcziHlxG6JiFiECF6hNwQWFw9Odma+2G51mVV1d3kxjuPd3R10GRJ/8MH7SSgPlFJKSQBXLfv9nZBdXT/6rTe//pN/4I/8W/+L//nN3fEv/7W/+anPf2l/XIgoEZfoBbgQEkOJhQXiRJCQsIjAgOuDBw9KmZ8//cDdzXV3sT0ej8xQtYrm9ULu7B6Cc0lI61OpUWYGwY0cQsQOcVOQwJyJYGQuSRLTMAybzWaz2RKDdGYCCdfcqjuDhLFJdPPBe69/6rPf+vWv/l/+wv/593zlp//wn/yZ337ru48eJGa+vLx8cH113B90mYnISvmFX/gFInrnu2+/8sqT995779UnL2+326/94i9eJR/G7Wazm+c5JynTcbjc6TyZ6jJNnNLf+3t/77/1Z//sL/7iL77xxhuPHj+ZtNLUenXAOMpOUkogxAMF4AXQxYE8XkWKIAZCjAs6s4bDOq2+TWquhSHEnevPJBY0F9HMoluJSFdejTWmU6xMlvhUQHAU95ySN+y1u0fKsXtWZuZmvuJmxKp+11cBfmPyYmbWHGAjImEO+mA0NOCLzkCfPn0yn0+8swTdeo3op10vBOsz3PMG11/s/fCh19MbtQzkvQuIu1t/t3/aoeDdZ7h3wf2S6uvzy/YPdxpXfQVFjZT6qjKdzBWRAKNIHlTxz6IFKyhm7+qcR/eTNoaZqbq6z+7mMMfRDMcJWCIj6O6llLksIEkpQ6qEoKQYZlA3ogR0VpG4bT2rG2w5kFOOqXp9p8dRxxWojvmVe9mSJAywgOK0xKEnUX2ziMsxmkw5MbUcU2QIa/fUgFsth7NqZxM1Hup5nvsg6ews/RhfbVoVr6EaJXmBOYkEnYjoYutYDDWh10jDppQ6NhtAKSXJjtgNWMynpcDmICN1FBEBFiI6TAvxOOSN6hKBTBYICzMb27on1wUVAOCccyZSbfGaUso0TYJEnh2+LMusUFUiEZFhkHme5nm2RtdG4kxMp3QfIjQf6xZ5ldYgOT1fnAhg9B4ewd2hRsxrT652ekpnichYM3u
NbpyTiODBQNe/266rNivqFZdympKJ2c109RCpBSDsdF8xx9t1RikE7q82KTXmrkZL02+OYACHbRhqhADdN/+6cVV/3Hs8Jfq24//J4a3oyCubF7cY3onS4/vtk9liEHW3gT8qUPhxa9awnE09xeAGsE6TbHavvvap6bi/eHAFIegMYQyfinG6tDgGEV1fbLeiULMPnr//9rs672V3RXwBpfnuhkJYRthJSFg5GyeFUNqCNwzxoktZSAuZ0eY9psfAE+BKl/3++PY8v+t2vNhcAZd59wTDNZFAL0CXpFJGOKFUX4cZTJSZrgiG8SWME2MGJil7tcVhsCzzLW8Au/HpWdZDRsbmBmxpKUtOy2KcAFL4UctRg/DJT0mCXtcOGH3s56y1fDWDW2i3rdY1k+EgA7T6hH6AKrQUW8iUYO7FXUUCbRnpCWKQsKiAGGF21mBuzxC0hbfD3xQwqiUAeMEgXFt93WIMFEwzfavwoKpqqbKEZ3dq7u6R0Go7/1kWJL4efKcd1prMSkqDu+s8L8ti8I7MqS/IAE6JRQZ3d6v7vau5WzCKRrXMMAy6lNizl2WBuYhY0f1+f3l5CaCUJeccVRZpkFKl4W0QhgwRaZ4WHVIed+PDhw+HYbi9eQ7M28zbTSpWRJOLHw5HB0KXbH/7tJSynZbf+va3Xnr1C//Gv/E//Xu/9I/KMm2GIXK1AprNMnN0zbgZKyEbAuEDEYGVz37u81/8/Od+7ud+LmW5vLpYjtMyHW2ZF+zHcby+uliWZZnVgTwInLwouSdisGn45TAmCEiX2R3jkJh2YXoyC5luxs3ASCl94XOfJ8lEst1uuSQIL0UVzknYwLCBmWzJQsv++ZPHP/A//O/9dx9+5gs64Kd/+kflZFTgtddefe21V+Jh/gf//r//yiuv/Df+9J+GLhBBKe+//74R/ub/9+eur6+J/PrqskxHE3jRybQAeXchkr7+zW8eDoef/PHf+/T995ZlsWXZ7XallGG7MfPDNA3DME1TMyPhhZhZ2ENMb7rbj+NIIHMfx1FE5nkuZZnnMo5jTilk+iJwsiwFftKzXlQ9kqiAlQjwu7uTg5q7JR4SVydXCgAcQtwUVywsV2ZOIq3DuSt1WigcyllW0M2KqpmN40ghbdg8xojNiIgzsZ+Q3NSZb0LBQhUr7y4cjDiMGxVKSqmc0/cTnQjf16bYPe+urxF+buStHb+2cHzIkTiZkvezi/1UqQndeK3Xsp4+iluI53UvmNRfr9/vfxJRQIXXtnuDDp6y3+sXrtpXyXt3188QrWalbAEixBJBPYJLQBYBlHJ6CnGAEhtBzTGp2kJEy7IsS2EWAxsnrzBO9hr+dBIh5yr/E3RXIu1iGBFUrL/u0iBt1QRfVQB2o9xbGi2yXu6OxrwrqP3jUKq5JVmBSCEjoKaqqoG1Dw+SXIutQnIAEjExzcvEzMwpBn8pVorFXafkKaF5BVF+IKVozkOMWFUNcp1Sitk8TVNgLIdhYEoEgbNIHRtElHMWkXAawxs8HA697LyOWNlAivniSCxZDcWU02ClHEs5zuXZ3SGPm2nGfl4W8/1+767MqQ1RM60jR1X7uGtXq1OZGNhebPuQ/s53vvP6K1/c5K0ibqR2u5nd7I8vzebuiSXnPGy3GDaYl3meh2ETY8yD4SclGLQUgN0tlKCrPFVVBDWOwypf60mKQ0QA8hDFkRpysmDZCZYyqrnaeZ4hFSJm8xzfJUngE/2Au9ewSPCusWEFIY7pEQd3lLKvtvyUUnienRbL/LQMEpy45jOjvMQbQbmvsE/ORBBU9ohIPjuIGUnvEfQBHkZ1BWhE6k/Dh4wZCcC7wxCSjs2NrJOq+rA9efj99olskRxHyzCHF9HKj6kFsT6syv3j4T1GbApOQJFhY/MEzpuLB84DgSEb81gXoIyUKqh6HEQMIEYypv1umGc/6jRjs8Cd0yhCxHBVc4MmThvi8b33boZxN47bTPCiNE/sJqAPbr9xufmcyD7JI8kk8h7RW7sts+6zHFGOoA3GS+ASfg0bASYkIBOyg0tI5yAYFrdwdiSiXUpPpE7YhTczsCTced5i/z4uHJwglpbDcjyaFaAAC5IXPZimzfYiPHwJAiqEytpH0iV8zBq1f1uRkMG9ruKd4AJWzItZSeXgpZhZEkIKbWaFlXmahQgOuEkwdGkRorIUwsl8RVgCHpgmRMC075idjjvCr7FjRlDbV1mNHnZk5thMVN1sqbg5o9jgRHJ8Gh5HlJV1HcGGxSAAqrosS2zftipHcvdkZmYlKv6CJWSFavWuJBHNzHSZmeCd/c8leEPdYaWUsrj7kHMSlsREkBRLvxO8U6U1ZJElIUjuCRpVZcew2YybjTnNxSiJ0ODkx2kRds5wo8hGHZcyq+k0XT3gp8/vnh2W1z77/Ad+9Cf/zJ/+2b/wf/33Hz+5JGFdakllNdndtykhopdeo7zsUFjO+enTp1VjoGioYuScFbYs07IEUwWYkqqGDHq30Q0O00qnapFbA7uzWyKAKAlfXF8k4XI4zIfjf/wf/ydffePXiGgYNg4QswiBRYac3Afxh5vRp8Ojh1fXD652yR5++mUM0KKgdWlSXXHj5ZMnTzabDQjmhmIAHr/6yu97+OCn/0t/8Of+yn/0q7/ySzod0pjvnt0ty3K5u3j6/Fka8rAZ97d3P/dzP/cn//h/+Vvf+PpvfvvNiyQ5C1yFkVIW4c1mu4y5LE3AJJFQHe6qBaAY5cacWeAQELEM22CA4NRoEp04gawpqAQuoiYAmczMmqiDNkCaAzyIu3O1t6p6RPgLbgAJBwVey7dwYO4aeYZ1J9BqptJOwut1YKxfnFw17XQ2Zwn97qWs3bz+xX5w/0rmOuexkpepbmFboeLWsEIV9p+j9hNRIdnzBdT+by/upqtE4vo2X9x3X/xWv6OVV3nuSK+8zXXzUybwQ9zae/28PlW/sHuX1wXx7h3QRC8bOM0AUpxsiD5OWlU4B5sjFneK/mSmlABxMqYmrBQzi+C6xFHtrt2h5hbP534X+hl7JPspIhhfZxDxaajcA5MAqLlusuraxSgmoAI8qr6BiEj1RSuuzw2M00Nlh5GFV9aDFHTOHtS3FiZhZklZROKCI2saq1x413V7SGkYhp50JQqBxzPVyjgytrc+TtBiAYuZMdixOIo5OYpRcR/HC0q5YJrUtJRiAialFvJUN+6pe5hrhZt0r2aFIo5cn7otyxIxVFUtog4zuJktphVa2iJEcVO2LD4ViAy7nS96GpAN1i4ikNwzexSKEB4xBCN3slDb6QEaFck11xfToYF+Tz1T0SQtj8zc4ZrcfEtYldhps4gQDiGx2RmUKJLOYOqyE+vZZwDxKUBT0QfucMY659li2OSIApDVczSiGnv2lsk3V3iUazudavwqWDROGMI9aPGRllHs+cTotCg+YYetM0gEoJUk0VkG8nfOP3yvNe777XetneWUAHxsNQlb3WBbnx1wAiXAwMIyEpFHdAyM2FEaYygAx8wB5fCC/fPD3TMtR+hhVmyPA0km3MAIUgglAeAtQOCUyyGJJZ
rYC5Yj65LcwTQMJmxp3GB8ACyDDapsPg2Z4HewCZpw991StsAjkcc0JIIwRsIWyFE8DrjaxAwmEARIDnEMZi5GKgQihrFcgSaUGxSGAwXuLomRHKwQzaNhkhBuQuueijog+4SU+3JE4AILTw05CnegABoVmG4zmbIrxEkVXnQxIoIWuALIIsHSYh7afmEKeFvZmq3YHKi+BbgbUdQQaCnaIunOHEnFgMwpM4dmXssodpBFr22qe1mQPPRdnjlIv/rWf7I9iKpqbd8KAZhWdfs4JrW6Bed2LgCNlrTuchXL4k7NGj/1rrfkgNdfDW8ypRTx2sxUWhfEf8yUhNAYzNBy9LH5T8uy083d3d2b3z6+87Yk2CBuOl9ejINwHoSZU5LNZpM3o4hQ0mEuxmme529+85uXj179kR/+4SGLLjNUvUZ7U1jTDjocDhrKYrFdu0OL2vLNb37z2Qfvv/veO0Qk8GVZ9vu9iGAcy6KllHDEXdwUy7IMw9A29Mg/MTNLIiu1I+AkcOfIZfHTD96/vtjlxMtx+Yt/8S9e7DbHu1tmGYUksbEgy7jdDETbxA+3OZX88MHVo0dXrz6+wPvfwcuvD7x1kYrN7GG3E2Qul1K8FE4JRKUsTEibEWX58pe//Nqrr3zjq7+2f/5szHnYbt2R07jMhci2u8uvfe1r/5U/9Se+/INfeue7v7UbhJlhnrIwi7GIsHsSiXAZVeHpZrplSgAKF2fKOatqJD1aBZFFjFyLq6kQh6luMLiTVygJOZjF3Y2qkdAMLBfmMGXQIvQSJmOFS1s8gly5lVzIPQI+VElcza0CoeE90F1LxakWB6LZZz1sqV0a7tx77JVU4bHEa24ljms3rNr950wbL/pFOM/jr52l/sLP2Q7PjlzpyJ0te+fA1BfXxfUFvPi7qwvje+/0pefe8e31/TOsr//8SKDpQPblcu17f+gdMQrgqEYk14REcwi9weqbZ4gCBipd01JK4sycOCUzC2KOYg0axyTMsBK/H3VwfQ08GwwvvMOOF01RWqViTiuyefMEuh4PyClLkLh02Z4Kaa7LP4Gl1W06yMEiAgqFgujomldchSrcY8gTHMRCzCAOqQqQmMFMD/Pk7oG0DKeirufMzJxzTjmHIxE8JJmTu4PILFR2NZLqOQ1uIU3g7sbsRGTqBQaziFw4MTk7MTyByVzUyJyUuAAALdzEY5rLxxxORUxz7lDVMMSYKaVkOscSERm2i4uLPvY6tDjuK6W022Qzm6ZJVZmlbuBWp6q7u0V009Aj3/GQqivlAXI+ndZhrQoUECbu1XCnYdxLMMKRNosTSncR21pRbRUzbzTl3mIb7A6uRXf3olpk4E4asxqWKcabObVqFq+Vie5FiShEhrp7D6AvtlFLWMGdtQsCMgqiEBhkIveTw9YLCBmkiJJ45xbOuldY1MM3ncxL1ujTj0g29AM+pu7E99uqrZhFmzdY873OH/WAPw5ttd+v32OSJMxOIBaHRKkh8ezU3IDqPoJBmHW6OR7ujlKKLWZmtkRKbXY28sWxEBEoQxgYBhpEGT67H1mPyZcIAJHszHbgR8Bj4GB0TeMDPT51JFNlX0DLNB1vjiJ5vr4aUMTBRsKUE2fAHYv5nNiCBxDYAAOwIYxM7JITJUNhMDAAPN0WmUrKNs3T7EqJwAwsoEXEmRKqDuGLc7AmC3vnfQwfMleMeg3JgUBkMIWHK6jwBShks1lxKFzhBjd2c3PTxczIXKoaq3nRcPyq+dKZva2WQ1lLb/RMW4fIeWfXb5Ud66U4Gp1v6KE7fDKTjFD18CpEq3kGQLXxPqIfWrVU0/OuxyVmjjB5bLdmpkXVbRiyqzeKA2cWWDEgPD0RgVaWgoheq6qIRAVXN5KYeWEWjUQ1Zz7RxIU9XQPSOHWWqh4Oh+lw57aMAnEdMpEtQmDBsqgqNhu6vLzc7i7HcdzQYXd1ff3gpauH0/P9/M7T/ebi4Wc+/fq77z1jYmHiQYgHK1pMYT7f7s2slNnVWlcauU7TdHfzPGUZx9EIm80mEqmzFdWg1mCAYRp+e0TKiZkgRARyZiQWjSojc3djIBOF0vzLDx+qambeXF4u03Tnerm92O/32UsiUWYRTkyJkZONyR9d7UYuD7b0pc+/+s1f/4dffHCJq8F9ch6BU7FFc3Xw2c9+9vb2liTo7kDMpZT9fn+d+TjtD4e7y+3mIicr883NzbIoUUrDuN/vx+0O6n/n7/ydH/2hH/j7m+HpfjbVLCRCTFTgVhRmwtX/SSwpCTsMxI6LYePunoeUkgx5nuclLTlnpjTPs3rlt438g7urnPIkxUlVS/i4XFEHBCQKc94RpT4tKdeyRuQgYgG3pNkKx27Vmq+ShgpnYmdydzmndmge2mmSrF2dEwbyHGm5njwfOmnXp+rnObPRz0lZ1r9CHRi5SgL4KbZ0L3cX/95/M1pfWe5d2D3/9t4t3Lt+5vu1H+trvufi3uvDdb+tf/feTwSQe/2t/hPrN/v6sJQp7o9XuLlTPUM9w8lk9/ove5UZt1h5vaoFeKrVVE4h8bByUJlZKoum4eQ/s9JpbGTiF28KOBWRNQemuQRAVAm2IlamE6mJAeI1JNLKXPvICTucTuOK0ch+whwhAlFZzni/iaLkUGTlIKlDS00HzvORTslDCc6VSPfVIeTsFdZNBDG7f35acQ71U3lTegApahyQmRKxMA2QNC/LvJRi7izMEnJK2qhKAyca3sKHTC47sXXnnCcnCrwo0ziOVw8fYB7MjGUdjoHXndgZzQsioiRkhlKIpU+qJhvlBKieSiIBgEzoTKPCQ6WHqswjupy9GRoMoA5FteocraazlpOoVHB2hSVFL5gC7sF3CwQyuf72aVb2kBLO1gc0OhmuTDqohD3MHG5acIy3QdhjyY4WHgPIbIlUYTiLTpV4w9cZvBa5hsdgZiDoByNPGvC69RLKIPPwJPs76FSrFY53Ov8prbQW7/q+c/jJaN0bPD27jzJXf/cavTCeWpiTQhQrUvyno2ghuGOJYxgiYEFCERRiE4C1OHNmGkxF0g6Y4EZQVbAx8Yh0cbkbVBezPcFSMlACGYg5b9W2sAvgWotous7D4wXleDyKWvYZuF2wpzzm7SzbwssuEkeg2fhIOC76vOgNp2kpezNLPGa5Yly4bdxSHj+NcQD25kcmtf1+urvdeIGZLu5MNCYkgRqgJpGOklYX2sLoHZATy9g/p2f1T9kCQtRUq6MocvEyEQq8wBe4mi+BxeBygBqHqe9uCi+qqsY1BwZT117wcs8aOrWOeMLKJoycygqSWePjL5qF6A4hnU6lqpCTgXd+WD/D6Uf7n7FddmeVmUUCK2qdqY/6fhCpQYqtmE+R+2qgMIlQ5sBtelCSssDcc2KCqKq72jIbgTmGtVHLGRLIzKBmq/BtxLwBmNk4MLkyYbfbXl2MZLYdOAuPQyby/X4/TZPk5O43N3e3d9PzZ+9fXz8l/s40G+fdsHlj3F7P+/2Yk3nd2lV1KUsF6TpXSJIpg1ggwonSMKZ5Po7DZllmwLbb7eZi5yUMJm8SWGwKIkuJ3
ZWdCZH2bHuYmhA7LG7I3SmYMeCHw2GZDrYZmJncReXp0+elzA8ym4AALUSzgSwrFjrwuEvCDy/l5Ucbn28xPwN2OhuPIwB0LucwQYDPfv7z77//PojmOZRDSUjG7QbkNzc38+EIt8SYrGRJIvnZ7eHR41c4DcsyJcEv//Iv/8Gv/MSP/diPfudv/113kpSa6j2MfMxjpYBnYqGoViIHsQ0NJ5bzkIdh5rTIknMmojklAMMwENE0TfM8m9nSMGnMbPB5nqeymJnBg5mGmRVeShVkm+cF1dyposxuZq7hZ7q7N2smLK3UaBI6fV8EOyL7ERZ5S7l7i5mde02xqNXg/RrJdEr1rA21e5P2Q//tB9+bvTjZRg3ltQLjrc/8UW3tbq1P2/Vn7rX1wrRu9xazFx3IF33O793uuYv3XvQ/IzjQgbj9Rta5nRfulxHV4fURMxBrDBGtOjbO3yBzdO6b1WMcBOMQwQ5F0HSiSwn1dq04vq7TwFUeoLUP7892tf2hU63MnKkF8ySydRIICTS0wbrPlWM8N8RIHeVRtNBk0M2ru0btnXqS7uQAAIrBVRsmU5dlKaaMquiw/rc9Got52gvWVXWZp+4d8apJa733iEhEihUCAGVykEUaUyDu6oqymFnUHEChWvO6ETAyU2MmU9QSShhRdzbaaOFTyS4ASrLb7WbD8aCOKmgUBlJg/p8/vxGRzW6bWco0gQQiLLCinCTIXQiBgwrJhMoBQERgpxh1RN7WE4QGYRsJpmdsPTVowdSjv0F27i1uFaRN6zEZKGJuuFMCIp7Vxvd5CDlQRmteUAMRdTz62XxvE+F8jHlglfuI7QfXy+klhWhA0BqDiDOvlST05EICDT1q4QQHtDtCmYa1VEugORbcR5++wFZ4du/94HuHfczN0X/ZGq9LBz/m3iAAwQIQkJy6KwgQzJy5V8m6e6FK0Y+mEAEGE5jA8A1sHPIFthfL4dbdRYTyWI7Gmw1qwYc7uPiY/RL0WK5HmfbT8X23cCoLVGFKYk4MzkA22Sa+EFzw9nKaFmaYL6Z3kpery51sGJgoPxAzKQpS0AJbxM38qNPT4/EdLdMmj2nzALyzKZfFj7dPtxeb2Quh7JgOz75bDjeSk5biDk4JkiEZXoy5MMzrtlKla1tO4hPUCO6m4PAGFQihjgNQ3IvbTG5uxb2Qh0pCC7e5x8LadkAjq0LW7u6u7h3AcjKWYk+MAKv7mbXTA6lYURt2E+hFGwyAaulRTjivwGdhwqEja1oO8MOFIddmZ4feMHMqpaSUqlHV8jABGqwbbrN8ECmaolV7vOU61vdfeSmKGlnkDMNBYgEIXKsc1Z1RFANHdUp8PQord5ssjE3avPLySy8/emC6DAnjkK1oShxhbbC8++67v/2dt+b55tOfefnq6sHt/nh4/ym5f+c73/ng6beuHz/Kw3icbSlWAtlk5sRElA3NoGev1KaJpMoYpCGrldB2TCnt55nYmUCgzAmAugklOKsqMYlwStJ39HAa3bWBgCoCkmFW5s1ms9ttpsORqFKhPHjwiG7eZ1Wj2dS0JIIupsp89+zw5FMvv/7kukxPr588eue3v/nSMObrx6WO6boPh3tDjl/7tV8josdPXqIwmmLzFMbN0ze/+U21ZZ4O5XiwZd5sduDh2e1hv99Lzu6uy/Fuvvvt73z7j/2xP/af/aNfPxwOkKRmDHMWV5dEwcrCzIOklFIUR5nxwJJyJqKU0pCHUZIOyjmVUobszMySASSWLKmUkkuZyxLgrqAqpyhPSlJKiVyxms0gVYXwrgXU47BFVdWZpQmwQN0De01OIcpXy5xWzpt7lZuPcVvtCnc3S5K9yYWds+rVdmZhA70krLf19PtQl+/FWf3in2t/6Z539719sI86zxqiuT6mn7z/u16AuifWlpn7p+0nWZ/hxfbi+/167n9E9qF3ur5Oa8gGIrJgsAq72rkmXJw90sZUeW47ZJQaWsHhTE7sDIMVco2CP6omsnMN3wS9/ulKghGuQsZj3XRuy3F1Tu5ddqf9XK/piZiJl36ARbltrKw1Y7m68VrSxk2cPEYt9cBcS8id+hwAkNJwTwfXFMUNdYG1pZTOhyScN2NNCba9CqrF3WOlJaJxHFMSAKWomY1ywquY2bIs8brLvaAFNequEczMWOCZ3GDuaq4+DBsiKaXM8+zsiwJsEG+bCJvBLRbV6hbVybXK/3uUnQewpfm6RKR2KqAniWSq92vuLrEHCZv5VCaR7Arx5lj3/8op0+6Nobe7f8S+wuq0w1buXFWPaEgbAFE02EZd9RXrtbmxITb1Ct1sebO+Bd/7rT6HTlT58X4r3jNFRNaqNlSE+BsudG0WxA+si/nq+Wvd6RmVrne615VaUlxqDAH3sLditJi35F6djzADA9pKELmSu8IBJlild1rBn1athnpe+NHvt49bO38u67rBj6U3CACYgQRwl1S2tj0YAChHthDkpkQC2xArkyiMkYDEznABjSSbcdyx59u74qzIatMC3xoWOINHZ3a6QHqA4SXIJeSORXSh2SF2JJ+8uNlepAT7qXAyDA4hpMuLR5imMu3dMaTMmwtgBAg0QPpqUGAiMBbdHw6MLDxfbCTvCF6SzaRKh6Mhh/N7WHR69oEf96obM/NENCZDgjNAi+NYtFYUfxTVUwsYfWwbBaiBDK5OC2GGz8AM7IM+FD6bqbuT1lSWu5uqheK8GXMSDhEgAQoUhuKVlb+ZE42HwNqKbQ0+6iuSmO4xrQKXp11+7ROi7Sy1DiugZN4r6rsy8L31XNRr4NLOawjXiQesDMLKYSogo9MRkVGp20ANIjqImFmb4RgHd3RTqCqF3YNcY8YAcs7ugRk/0dMFaCqzROm8t2QCM7MtOQ3CkhOzgMwTp8RiyYeUAGy3F5zk2dPnpZRS/OGTVx8+fPxg0dde/9xxLtu332N5m5jff+/9uWgxUMokyYmdLMhsusNPhEa6iuI2pCGlxLvddrMpZR6GoZTiS6WddCghMAPMzD45M+ecc86MJpsmICMY42T+Vtvr8vKSYLe3t6728OHDTR72fHc8Hh9kkZw8J5OBMyfijWAzSk6aWceBNhnbAYfprsx3g24LtcFU8ZX1Qfzqr/7q1YPrH/jBL6ecAUy6JKFpnqfnz3/hF37h5tnTR9fXvB2/+9ZvH/cH47LZ7J7v9ynncTOoWwL+7t/9u6+/+up2O07TFKVALEySluVQikhQLxIFalSEGOLuI/I4jCIC4e12myQzMyfptSiLaVksipSWZZmWIx18WRZhcoILqZEH7jgYNITFyALpySyUeuahlCLBWe9eTugyqwzyRATqm04U4oLcwg0grI1np1BEPB/MpwQhorqmt5NX2iP9zUelBgq/N4H7NO6zbu3prZ2fF90zfJjZd+/9Fz9av9lJRD/qgI+61IoqVH3x+Ht3ce/r7cI+km7n3vXX8+Ckco6PdjVPfTtkBFuGESiy77H8EsENTHD2yPFEQXf1zOoCGfNdjRyx7rsHgJMJZNajBYEGOXv2wkSRjLKGj2Gic/erz81VRrE2JmbmLCfp
VwDuVmNG7UgzOyumquUOFCkRoprMYVnF2rFyVLgRPAJwNrPFSriCfnrEQkRpyCKSpe49pTmK3dMjogjYrfu/A25P0n+NsCSYPK0JKtSxhEIQciNfyD1K2syMIK5mqmYluDfDhfEWklB1r04Ri4iWU00vETXPzoRsHDK1URdvridsS2HGFmPDMKjqNE3unnMGyzIvqj4MG6BW9NenU2OkbfxD1xM8DwPwgl27XgFW0yQ4AWLdkEaTao0tAIAzmdVIFlqMTE90xOuMqPsLwSacz/f13DE7DVD3yqkVWGP7MLOgZubrNFSsp797UNH087u70zoCXZljmukfriDOawjr3Amf0KEtM0kga27hOsX4Ymuz79S+7xx+zNtZOODj3RYEXJkCh4KaAaz8I+pVXCERDAaoIAnxwKQW0Q0DbACPIMEgmYaU3ekIHCm7GYyEkEDFKBuNzhuSK2CLYWAxW5Yyz8tiJMSkQ5pyyjAqR8WQXMSMRbfIA24/KHeCNPJGYFvQiHI55wNBhDaMBAygjJQIWZ/fQSciYQywBAXMsviweQZVBTlSmRaab0nnWsaz2xKJR+2hDE7zVMsZQkGHW4bwhCb4pDRydyyMCVgcB/LZcQQKYXEUh6Hyh5lgqMsmYK6opd0SMa+zpdUNq9WnLpItYNz3yt7WGKjuAa6tvr7d9K2tL7w1Ykg9DVgDiN2uqMbOR7eVfXJmZ6a+ZcJO/O84FZefWQP9x7oNx8yBBXWt3u0aPhQ2+okIvrEsiEjOOafs7sVL9E64jm6LMAi2HI93N8QwpoEJQ2JVXaZJ1bMN8zyXYsx0nIs6FtOUBy72uc99bndx9Yt//5fnpagBJCklTlkdi+pcNEtmIvLUGB2cCUQuMhD5NE3ulkT2+7u4r2JSSlFd3D1J8KRyd99DsbPdnLaaT+4Pu4IQiUyXy8tLIhKqal3bzW5ZloE1jRnD6DlzSonKRR4utvTKVT7c3R7unn3qtd+frq4evPoZDPzmN3798RdfCWr7gC5FSsQJr776quTEzA63MCuFNpvN/+0v/aWvf+Nrjx8/1rvn77/3NjkePr5+7+k+pXR9fb0/TLc3d2QTj/zGG28s03Qodnd3x8yU8rjZypCmiRHls413ATChmlVIlMZxjFvejZvLy8s0jCmlzWaTUiqm+/3+eJzDIdwfj8dpL8yH47FPj4jALaWgKTXH3TmziCxzEeKWSzkNPyKKkWdmvUCRiFIaetLcV9QUc1nW9hwACkbW1fRbfURCbG5lxZkZv1saB2OP6987w3om94nXz9yPefHr6/fvHY+P3kq7B3Xv+BfPHP9+D0fxXnN3NPq0k1H4EUnL/n5NR5wXSQIIB+PFe2E5xavoBW+wm/WnXo2tyN2JPci6wAgEsldqWYWRWwgBgTyYWAlGLDFRGxtoZ910Yg8zVq1SfbiZ9oHUwe09kGQdvPkhmdhwCNG8a7RYA1qA7N7d9Xs3s8gN9o/unQfVlTjzNt3hru1c7eQQAIooVPB5nlOtD5TIhIdrp0HRqRoUnW3tEhHEmsyc3MnMmRMzl3LsKMQuUNR3BzqvJ0QQL8BAHoV3wT1PfioqFhFOyVwL3M3DqwRO2BtaRVJjQ6GGjTEzzjyOSYv0n+5DKIKpa6BOp+IchmEcR3efDgci3l1egVPIdHRJwwjySKcM7SnKup6rk60eQZv7FkB+EhG033V3dldTIpKV40pEi9cq57WhEPFUnELLpxAve+3T9bABAKrAY5xDtU/jduVOExEM5tpHVxxsZnkY65ExkNt5SimBhyMiUJQsOEJ3/mQFtddVob57bhX/v7aX4v0VOpQ8cjIUkBens0zghzFYfNKwav+ytoiAfCLSg8BJlLy2ytWEMIG0biUwr4EeQQEymMRxki1AGuEOZmwkD5jLBJpTFkCC3Y5YzMmcCzhDgAywyJXLvvid2SzMLJS3M/gCR9HZWYh4dB8lAUebnpW7Z8uwwzAIO8ADZHvMx4TBMQhEIOAMIxCn9IRgjEHI69wcNsSM5+9gOYIkpY17ybwQg4qSi7AgD5YFOYNIhixp6KzIHsZtkE1GRh8Gl4/9pDSCGbQ9qoV8gS8EdSruxVHIrbIeAqUYGjE+LBmKaoG5qQLGDaLZjKUGM16ZggCs6fTew231xFvfvNZLMV44lZmlKibhod/WZAm16QyfGRVrA6y5i/Wnu42BFXaDiJKIEi1aFnHfDDzPRYvlnEFwU1ESgRQbJDOneT5qEoUn4U3ODh03QxpEl8nJmcEsZlCPcKoxSYEj5WNRIpJxw+5sxczSMBhKKcVRQG7e1Ip5c3dXCA4/jnn7+OGDzSjuOqub2eyUORmnOeUjyzQV12U67Ic8Ho5Hg9wdp+f7abi8mvfzcpw5JZIUxJzDMACoqlYEN2cHVawZAMiQRJg5qS7jmA/HGy/BA4CcM5HAlIiYrCzzw6ttKUV1KtMx55wT1Yi1jzBl1OdpbmEhpJSs6JgHW4rNQbtCA8uRl1Eks2ZKmTi5ZxPB7jClx48+e8BWxzFdOR4dfX7nc5/lt5//wm57dbV9uRgLXRJt4ewLHl2/9JlPv07HhcZM+7vddoOl/Kd/5a9+/f/911/W6enTp3d3twziIT999qx4GYSm4wFWUhZC2qvp5uFX37k7Lk9TSomHBL87HoZlScxWls24US2X2+2yTOVwePDkkYgfbu8e5ke7fKmJd48ebR5dbx88+MIPfOn1T396e3Gx22yY+ebm+Xfffeett9769m//5vzuO5fPbzRxPg46TwPL4U7fefb+1cOrI/lEtMBIwDkl4ek467yMJK5GUJYo9oSqOiEYTWE2SkoDHc2hllKafRmSiAxCJxScV6lQKDySPMWMYCQQTQAoUbeJIS4i1XaM1H/j4qs2WS8QbdJnIVFdK3kIoJMdtkE2apAANNELYqumXkMInFIcdXIC1ejnTsNQcQcNy+IAIJZq1KY5IxBm5mVZUNXDQsWxpsjItZp7IfIRX21gNndfmtFMzGASLVEhYSAwAW4gwItbkJoQQKaVGcWwUFWQW7t59fx1DfJ16F9bUgTrd4m0AdXuudm8OJFV7TRyYrir13oAAJXbE82FKF5qbg5MzKW7uHBwiOywM4Pq/M2cokaEwFwzOx7449oN7tkDfg64BpJ0nSL2UF2rxWghGG6ovpOb18AEeSTQGB5MZxCRlNggMVTcDURlWVJKRCmkBd29bSUBdnCmBIS9RcwylzlGaYo8PthhxVWGgXKCCJhrZbUpTE2LuwdVGsGEKBFY4MJZEjOZLzAK/Vy3EkyZ3SVrekoUYg8559vbW2Z2WMqScy426nJU+N1UJtXtmI42T3rcJFl0nhfNaTPNAt+wc6ZM9G7ObIqyzACpUyllmcuihTmYj2kpJfa/PGYrZZp0HBMlurm5YWYtVgX7zEyZPLFl10LGm3R5XA6UriTtbve6uRiHUOmY1FARMSyNp0XN3E20gkdj9wy6MHMJ2vFzpAwI6DV7kQ+s6AYi5oFTgFLcm5II2BBwpBPQIFK1Q9SKA0SU0irZ2HZ6a9qb8RPL4ZBSovZwuzHBjTM
WRMTEkeKzCqkQPjnwNfaxzPETFSnXnHKuQq160rNiZmijHq2Jzs7e12knrFajSEwfxhIQ6OrXOhMJgUFQEIHh6gCRAAFhXRuYhFp8yH7mZvSE5D+Ok/hCaeI/WfuXLQPZk8jVVm1ePnlLIHuvwV5BoWMQAQgYoXtIejIRwbiTVxsKzuDEkZML9NPvQlc7Hq//5NNFMLAxH+LyQOTsRi75GYEJmT07RAmaYGnJePb+s28+fjQB/uD61duZDsOrt4u8PN0Ad2R35GXgwWhIvADiuJoBw7VgN263R/0NzE9HEfBPLst7BV/LFyXJZ1W3ZJdmRr7M43bebIyWQfOYJoz/P9Db1/YV0Ai6AB4BLwMPwVsgby4+c3ec5vIB0fNd1DFrxpJhX5j9rugiRqqHucykumECsSXez8fd9SO4YVloOow2L+OCQaCZZcgAoERHJMBGICmhAEARgDzB+J9ciuKfdHou8XQcABKDVmZERKYUpKjk5AYsIOUqL1FgR+jkWkhAM8pi7MyykQDalILlFlT10FyV3ROzsB+WAwCI5CSAlDLHnl05HQOIZF5JeMLuqcHBiAp6KbYsSrwwp8YNabGQMjPcamL6PItIkGrYoPT9d13530N4ESPm0yQS98gWgYkiEO/moJC+KMSeMlJU+jXyMGJmReX8qIX8ZuqRMMT6ytDARe0d6nHxfgwzCYg5R0llRKLDfgrIV9jTQRkX7/uxXFxcwG2a7j549nTcJOatlhmwNGTOiYSXZbm7O6jq9dWlk8ylkCTitByW5zeH589vI/bcHfTqHEfMFf3BhFffI5GnJG8lazfvKozxTK3HAxabaalhb2IzQ2EWSimxVrkFrzLTXf7Sa1GlCMUyWtTd4aOXrAgzTJzcjQKHOM/zs2fP3n3vu08uMfoDygQqD7OVw9Onz54/ePipMh/3d7rMdPP0+b/7b//v/if/+r/+ld//EwCTAM/e+fm/9bf+H3/xL26nY7C5MLMqh93X4gr1Sbm5u7p6iepnEldTVyJ2rvz7IhKYTxGhqq8teTNutjtjypvxpdde+bGf+sqXf/xHX/nUpxD0DLHvi/wwY3r+/I2vffVr3/j61//+f3Z7e5uTcNkebp5vt9uXXnrp9nBLAYHzk+kfKWviXoBzv3ivjxnUyl0ACJBb98f6azWjqubFDiCUQOGdMAOAqjo0GP8j99iRk3yeIwqLvVog5JXHz10J644lomVeAGhLHngrieS4zmaNNfUXBOUqEaFJKfYxDKJEtaKsX3MvYauWGxGpNVR03aClD2kiorQ63sjcUHMacDRhnjB0qRYBeUh5uJvDGezdeK0iR0QwJ9yDT55lAsMR5IA8/mO0e5nSdZei5X/QuH9oJXx/D71Se7ulE/pDXLN7cauoxmo9vfeL60zvegHsi8np04YZ6LdwWlfad6gjDValAraqfe3yhpvN5uQDtHwOs/SFN75Yi6uB4iXy80HspC3ikHMNTCzLUlrqDEBOEt0oiYRylpSlXnzIjaqfkCruntrGQy0acnKEW2Yslsp+2X6aKSFYcOpkMyMyVYCqu1tKEZEIVMQQrhmzFR1FbIF9kJiX6hu3rTFCKabWKy64wcvXOo0dvwFiRPE9hdxfi5l0Y/cjcvX9rlefnobOahRxL7rzhomInwiEyfq00YduQWlzSvfdW1j6kmitLsXM+PywtQ9575qrAcGnCRX/Ugt1hZzG+lu9G+tjBQym5iEKUjnoAIB8NQvbeQwQrwwdRJVbB0S1ED4eQ3U0wCe6r5CnX1HIOBi9vJlslXrqCcnv3fg/t0/4/fZCo49a1T+qqOx+gvfeKP3kNF5FQHpznY7zfMT+gDQj5R3nIDJ2FnIBJzAxDU7i7qTFJQxRJggwJNmSHJEYeAo/AiMB7sX86JgdC48ybDEuBpvNDUYAw7CUI5MSJ+YJdARHSaTyQBvjQ+zXrsQZzCGgE9FS5sqNFTcQFXL3dnCs/gBOSfx1o9+F3D2jYVfvXwfQ/EAHzOFki7uDCkHhjXiPLKpQmBlqsAUaKr9GHeMnkppWRBCvdBhUX2aZOZjMTmvseSkgM7uf1RPmdGZIxDkbvvRUjd8PQFuK+8HVjfmwPQJn0NBTGNcbgO7edz2E6d2qEQM62x7Oln6zVewnDjPTAOQoEYmEYOPZNUWmpVvtARPqic4oDGOu4kxhSRwOc9i+drC7u7vnz4fMJIlSEnefpsWcSHIxVXXOiVMqimRMkgqV/XQsqpvd5bHcOKGYiZtAIkhlZglrIG/l0WEmM4v6s7DF6z9EcGdOEQ82hAtHcS8ppSSVJB2AcErCsft5jyKH6UekVqxS9iUK38OcmZNdQ1md4CzOxLY4HSe+GOkwTdN0KMe7crgZ338LsmCTRr4ex/x/+nf/nTe/9dbnP/vDt7flO99556XHr9DtO3/hz//bf+P1137ix3/s6uri1375V772ta/tYLd3h2mazUxEnGDLEmrxS7CGihBxYYPCXclA0q+/jpssmXLNAEzTtN1uCZjnebPZPLy6unz46DBPn/nSl376j/6R3/NTPymPHjWuG4YDy9GL0jiOD69//A/+/t/z4z/2xqdf+//8zf/0N77+zXI46DLe3TwvZheXl4d5EjdxoSZFTOFw1AyKc1QDmkdxoFX9PaYq+IHCDjKpNVrhNMU0gDe+WWdpHkKUiJvkENQAyFgBCkVJFpEwst1dwNz8xgiUVO/LEbzq3dyEWUFzzOo0WU0H5nsCYqflq2PtTt89y4xBLWrcTqZbTeu1tcZdgzuJiDw0PIiYKYL94WIEQvo0wcFcy8NjMljE6s/WVXa4EeDs5uBAi1GgQ7xGEyLjduaBv3iDVDkSTxfwPdqHmt1YLY5hvFrrt+osOfx8D1svaNZw9u1+T1Z1PSGRr0JaL95LHBZhnZYx5nsXGf7LeiGOh4LmhNQRSCwisXf2p6ntOr3lZ3LUttXbPfVMl48HllBGrcELqaB9XqH6ezxusfatc4ozqVlO5ppJpnmeE0sMpYYjJaqP270hG8Pp6NtY7QQGtOYP3cS9+rdnFlNvZmRk7A6nc4gLAKcqQ2qLUStt7UPHzCTIcniuKa9ELFWaKR4s6jivzz08xrPHSgRyK5H8r25Kd+f68Lg3OD8kYBHPekU8s36fVicUIKYhnKR/sR0uTZDpXuyk3ks7Sx/2sVZ37i30YU8natPWX+4tYlWvv/LXnoyJ845pM47Q17TeQ6jfUqJEiFA0U4tQxdIXg5Y9iGRPwIZmMkZqsfKU9td0eo0onfXQD6LWAXUZYYBAhhUe9R8r2nQ2Er/vHP5jtY/I1v2z6b0P3TI+Ia2xpzqY6pAlWEThD/v9Nh+xG3nYJmIhDv1VpgHkRAM5wwvswDIJNg4Ao+CS8gOQIwN428mJBmYmFKej+5G4QGjYaCnzPN8tnjaayBJ8KNOeeGAWSYPwDr4BBwnOIglpgWoxMUkCCMwROKnuFBKhKei0Jmj8bZUjiu89LK6m/+/OM2Ss5OXP2LVqetAA85CXgFWAg2kLYTTxOW3AByJVJSt1wauJjZNUbEUS5bwuSTjFZ5
1jj40tnlY19lh5j9FW1QxnsA4AKaVzBoMT9SZWwb5m2JwVKKLtUJGsonuBvFVhYJhu659Osd0TkRkcDUO2Om8kQPplmZ449ztXW7ULqMEknToNPFaOZSh7mJl5IRp7X7RjDKBxHJVM4ON2U+bp6bNn7np9fbnlLRHUjQxEXkpRxzAMxnl2Xw7HYnZ7c3y+P0yqJELMRrAKoFEK9hFTklTjlBGkp5gJpMXcyMyYmMIKhlAH6SHo1NQNQtBTgDkMInJUgjsrs1rxCDhYEG1HQUjIJcCF4VAtdZ7Sxo2tFFNiT85GpHuUy824TNPxOE+H4+3T9xR34/UwfvoJvv3rGHZX+vy7/+gX3/7q1x4+fH2Z8dVvvCFDfufZB++8+Wu/9Lf+BsheeeWVLMPTDz6Y5zTP81xKJYD1kNNsbCjsILCzMFlo7Bh1Re1EnFLKKXNTS4sMQ2JmBYDNZncwe/j6az/103/gx/7A78f1Jcj3h+Nms1Gd8zCAN2S2TEdnSkPOm82P/eE/cvXg0d/4q3/ljV/65c3lxTQdlttnF1cPF1OFmxawEBhqHmjMmN9daJCcGOyIghy4qZXqkeBkNXdhsDYBoKrmzmAi4qqx5mZG7MGxEY+vGtMiOedq0QY0lDgIkDh0wszhBgqiWW5oGphZl4GuvoHE1AAAJwiRwgGY6nqWWbNZ+7/MTUu6GuAVhw07fQsA2qJzWgzQ6YI7Q2PtEwAa9MfNCux6BuEInWDmfW0SABCIkjORn2T32EDiUZFH4eg4kDopy4piK/qt24G+ynvwR5gCfB5aO7ukOKDnSM895N7q4mPkNcvjRHB3EQYIRlZt1FPmjahzy7Ug6KqfrddxMYX2nKrLC26nnwAIH1ZyGQMIwbkcfKHoKd9+b9XaJmh4aJzgTkyqaqbupSzayu1ah+WUUspZIgmmK0ghgHUNbXWKqksQTCeQIFFQXVQBK/NirRQcq2r1MwcD1b3vviUxWKof4jWLG0PjVEUJRB1HzKgqfi7Eaii21HQgamRW27d4XdDedkSzQFPUWR7I7To7gpIqtbusU8a7A78eUt1Ld3ec3SBITnME6zCBr76OGnZCy1TTSkmC2vNdT9soknP3jhEIDiynFplt87GPin62PkX7Q6mjd93DL04rr55YsyFOt9nZld2DULo5sd5xglAEm4IH1Lx3SABfEZ1UMYQwuJC0uzagojKcSCNiBa5kfDC33ldmVew+vtjKEQFUxUJE+RNRV7GP3uveJvwjXMKPplj4lw0C+k/T+AUXuinheEC92jrZ84HefPWz7qXGv+w1xxsD+EPW7U9OS2iBmvAJDQDMYSKyzDr6wjkjMTtGYagAyR1kITYI+AI/wPdCA8COTNgxP0BysDp+BXyVeCAeAWc/aDpGUBc0yzCLH8qUlvlq2AzApkx7x0RchkE4DyQJcDBgz9mPRAtshjIEcPJiFePJTFUtNpZaRujLhcg2JB4lQTgYZRrL6Gm6/TN7aP9089FRBU56M4cSKVAcxStXENjVXQ1GXmBKrm5KrnCFKXlU8UTZtsEAEZRSzgrsSUS6gDJW7pK3wvW189YCx6dESz+Amd2LqnXe774NVTOj4TL6d/sG1PfcmEp9z8XKDsFqYe/X+eI1r22nVClh3Esp6/7s9nFKIq0GoZ+6/0yvKKgbTPMJAal0S8MgIoAReeWso1ov4Y34gRsrZyllt7tgJiZPhd11mo9Pn90cpuN2u72+viaRuSx3NzfPbm7B2GwvZNi6+/E439zs7/ZHdZCkYjAK8V4vbmxGwojf4+7othBB+KRVJpLdFiIPoJTaUoKsxJnZtRg7jNkMQmKKghId4wqLIrSlGgS2TrLlHBH60AEjRyklEQMotoBpMWVmcjJ2J09ON2kSn9797tN/9EtvPH7n8pVPP3jy2qORHPuE4/HHv/T59771nZde+sznPv9DX//Gt//qX/8bj588ebwbr66uSplvbm4Oz57u3Y/749Eu53lWVclB08eqczF1qhw/DkS5mjCZu9XBERJmVN0PNXXfDCMLYG4weL2jT33pC7/v9//Br/zhn8blpZZFxs14ceFgIvawZwR5u3Og6HzQckH0+R/9kT+2LLc3z3/rG9+4eviAyJ/ePA9aL3Z2com0LBunvASbU/NsEvFCZ3a2r6B0QHdmm4q9UtFipswMs3iQ8dGiJW5hnTGglu+LHMv6/UiYgMxB5TwNJUFgc76TccgKcK0VdK/equJsBtZ51D20YsQclW7esxPu4zAE3Ui35OJOS+g4e3j4HKMLQM45S6V3WoP0ClPou0Qop8e3srRMYXNm6krBmQCHJRKFm5s7ed0oiEh64KmZ2qUvNGHO1owBznrsRU/vQ9vasK591UI5/en3FY39ZCK/6KTdO3N/uLYilhQRO1/fXvzu2qzvP92v0N2DlcTP29pwD+FQM+NTnO2MBUQ61pQAwAzuJwLPGk7UtaqsUBN6ZSERdtSytOq8uG23G1XNpRTv2Nd28Y2NNUJ8sBLjp585LieGDYOY0WsmUSGsbGYsndKTewItsuj9dav9WuWpKImzM2soUsSYtDj5SWUppVQZz3vsp25pTm1BKKW0gV0joxW/arYsk2oBUq+C6y1CMyRM51yILz730zgk0qbzuX5wACTl05999YCEmEff4KlO7VZJ14MaIX+24t29dz0scrYIoEVtoxWNdSaMlQoMWTuiLUm4vmxCdURjrMSl1JtVj5CcBO1SWzLQl0qQrnKM4XcxhXQEtZBB9RNW4E4jZyf1ziADAZxDjAVBvRvzSOs3apcCzu6lkegohVYIqFcRA3jRrGwC92v9w++337l9pHneoNRE5HVJ+V5LOnGEJFqCN5R74KB7HuMn7Ok4MqqQaoVXMZxh5Ehp8AVlKlKeC2+Qhk3ObmIqbs6FweyMEJeH34C2gq27gLbEVwYlnxd6Zr5N2AKXgJtPTjNogPNSCovKiGWCWgYuQbMvz5diThOU2HPOAhRAfH6mdgufW9yIQ0mDTB1WWRHImJnEGOj4GZCAWjY+pBZXmhM9XvfPv+ejGdjhDGrkP+4OoghOrb1BJVfAag2/FscCL+TqNsMWIVhR14UcHOczVTMBW8t79cFpZliWU/BxZQCYoVf7m1WWOHTnsO3jfoLY8Mleavvm2eJ8bmrSOTilYZHODu6HMUskpVZbhq8PQ+uyfnepb+f9OtZGcN29/CxH6ev9YA3BaqFZcg4km7uzECp9u9fgOBiACJvV0kQi6Ui8ZZl3ux0LpjKT8Ga7Lbo8u7m73e9vDnuRbPDjvBR14XRzc5Oe7lJK81xuD8dlURmGwdmXomYKjxLTis5e9eP/n71//bluy/LDoN8YY8619t7P895PnVNd9+qqvsfta5wmacU4tiySD4DvMSJgQchXEH8ASHxCEAuCRJABiRATQSRCwInAsR2DbMlgY7DbTndXV1VX1/VUndt73stz2WvNOcbgw5hz7rWf9z1V3Z3ypUkvHb1nP/uy1lxzzTmuv/EbTQ0FcYBVd59y1Iy5uTkqO9zVvKq2OU0Qr2okRA5zSgR3V2pVduh53
/OeBMPcHEkDVYU5O1SEiI71QxJ2I0s5mZuTEyaj5y+Pu2T0wdUv/4MvP/ne4ebDt9755reX5frR/Sem/O23P7h58fJ3/85P//gXv1iK/9gXvnBcy8feevPD5x++eP/l4f4Dd7x48UJ7oBlsZiBxERHkGkBquDvUihvcjSgo/IWH1jRXVaJVjURkmlNKqSxr0PRDoeo/98//87/jd/8u3LsHgWtaXZmywYi5mHZmBWJilmmWab29mnbpi//Uz/xzT9//D9//4P3vXd17+ODZy+fNyG+oydYBhtmpqQ0LgpEWxMIpnwaAzKNuFwCE4BSOUGI26dPOGuvNrKZoKesjotEsjwA/x59MzhKL2UFjPByNvJqzF8KyG2QR6hyKsfsYDICrVfdhIA4vIn44MoEAgiuCQdyl7kk6dLjeODkRcYpGfCEaFM4RSc+CzJSEU+xQ97ivG1W4ApaEiJgoEeDuu93OuxFpZmhocF8oKgNJ4ewNdWEAIoTRpEmrum6ixqGbdt7t7lqRcjfoAfRJe+1xx84+ycRXpGR8WUBbLp/T9zfvnCYNxMFp3HOVzJxZmLlsSua2I7lznnG2JGmIwWYt44yu+c5BREYBNu62N5OASilGCJBJMEwGYgNg1VpKaYxHm/OwkHAKzy0eVq3Vq+WcT5W0RPH+/uIQ4xTV6mYbtArUiJ3EGORQJiQRiZreCP175NaIKCCBZ+13RYS4NWwYmBEzi1gTEwQEGPlY0MyI2vKAnALwxFC0oQKBUSaAY6OISNEa/rOZ6QbistlGcTvVrJqxqqB3RFArI186vhZ62k/pMo8Grnf05Z3FcGd1bT999Zvb5CGjNYtvUqsNGac69e7/qyq0N2Pgu2ceO9S7b8nMPvLbsaR9sB268yvj78UfW0XPRBQtoLQp7hPy0+BMvWiihZDGXrKopo4y4wCUMTPEYITg9/Uo2wvRELicdruRsycGNz84vtMAFYGkaJlJ69PZw96OAIw70Cgfoy/La5va+6B0t36V/sndZ/bbxw8+YsJPfxJ1SB42mdvu/rV3fgC7yGul5W+Bo8cyACN3bs0ovH/EtdjLly+f7O7j3iVSIjCWZE7waMFbXW9IXwA7TJfwDEvgDLpQX9Ss6JE1Qx6AL9yuq966F8OO6aA1yyxz3pdbcRxAF2Dd52vTWuqtSyI/gCa4wXOtLxzX5IWFhBmBC8mTLVfODvbGuM7OScQl3JMeg+PhE9ImzgXgB7iC//B3V0hYi8gjGYEGQJRQHRWoDHUvDgWc2VpYqzkHUYNAVqtrsapoLgzCORgYsU4P5qpaa6HuEEV2YeTrOik1ubtqJSJAQhUSO3v4e0qEAHBF0HU4DuhKTVXhp8TgsFWGVhqaK27mTJifUGaienI4xxe2qm2rtZg5db2SmBVO4Nb0sKcv3d2bX9BDQc2oarT/iZkJbq6NdZAIgTEgcwQxZ+RVhhobGUUToWC/cSjIiH2phctx8uTulCSxoDYGyNvnL2rRYsqccs6U8vsfPnvn+fXFxUXOObAokwFAUdfTnbKZGTxozkavrfAHgqODNn4zgESJBUQCz1JRa2UIM6c0RRC8emTVrduRkRJUMxPioo3DbXC3DM1N7lYVTJmll/W/FJk8COdIwgZyZCd3zwTUBeXKPnz7+YffLR8+e8+mbzx/eUuYbhb+D/6D/9ubf+eXKe1/8Utf+dTnPv/h1e03v/e+uqWL3YsXL65XJxKtKknIudbVjVKinLNXY3d1N1O4myl5Y04MkzQyO23FgxDoLuJg0xTJzDzP8/379+eLA+33utzyfi8pLaXkDAUlkLBwd2uq1uoVRLv9YV2O0zT9zt/7+77ypS+9+8531lr2F4dSFnFShKqJthPGITh7poQ4NtVYvhR5exCrepQtxY+jH2GsxjwNvpC1arRBq+YNfkkcucmzOP3Yb8PUjiVKTF7DIRpODQlIcIJSIyxZalCYTptkbdhMAz82thLZ4IRo3USwxXSZA1jrEpq1+SHMUR/Y/Whyd2GC1sjLkikIAkkt7u4AyDxDGZq4d9GU1mhunufIFI37jYu/VDOzWmsxVVWjkJdwh7PDLfRaUyfGLuoIMivqO9/diWzDUbERXh8l6LfoiCF8x3HnT9nw2Yxzxh4HC20qoIg4qKC6yeJBfNWWfDB7DXHZkPCn+Bv31/FfwIu3mWRmDr80SGvQRfYYktlJLgdC190rPPrTxZsx/zUmHFZrHW3iRxBx/EsMh42ULzFG80BsNEf0bmmumnp4BUTkVauuZCSUiMFMKQjGO6mSuVPAO5lTSlZLKMWmOCTQ8uPuTLUQpUjFAxAWNeLAjwHUSjxJJAeRkqqaM7sTN7ezp6BOEy8S9NwS+3FQdaORSNWxW8ech1oJx497JyTiUzHniN22BdYWlbqdtxjhE9TzpCN6AfyrS3GYyrZFcLpzSoJeTCKCACx493I6z1C8Vm81J1tvrvmT1no5jpxeG1NQpo1dw6c+qjjfaBRB6fNCICLqRcA9F+otZBgCqiFrNvYEtzDT2BxtIgnw3pj+5J4RtcaORD3px82cMyeWDlwlghACrdUntF0kAluGxmtqADefkYgaeQ11x2Nrp8ba6w5rgy/99vHrP15j9Ls7Ni0oOxR0QEb7JN9FjZ4AwNv3v48i+Cf8CK4kIPxfC7cEQMoTyyxphzrfXn9Q15II8IoM9uSFycndHMXsRuqHcIFcgibSPSiB2CHVqRbJfkH0CAoQm63gAs/ge7UeZD6kHXJmrzN8Bqe8m/J6rLqYZfg1bAYMTuzHakegCsPdqSickGcuV6A2FuOGVCQwTupXQNLowJ2dI2FITtDwqf4JeHSnGJIrkQGVgke0tRYsHp1CXDusQIW6iyBwoLYSdGZH8EsD0XX15GJhE48b8j/stFYdxiycO0wSKWVrRfuhaCAiZpWoEdKAInHX/Le7bmHX8sOEGDmt0JubX3kneDsLVo6Cw+1v4xhmVQfjdMPSLOAhxMxbntNBY2pmjZHDTmnTUAqBHWI+FVREgNA9ggkR0NNRghgWWFg1ta7hTDq0VvWGHeWcctTGzDmxy/F4o6opT6UqGJS43tZyXO49nOc8H2/Xl1e3xPJwPuwPMzPDudYqOZ3408IJtKbbamkJGQBM3slQ25GYiTgLp8TB/3N7rGWtFIwgGlgpEaq11sw5uN2Zo/QhAZ5zPh6Px3UBMKXkPTdIRFNKAOpaAMypxfVTLpIJnpxMpEAZEIMeDgcvt/M8P354+eh+3onfO9z7kQePv3Hz4YOH+Rf+/peNdtN0WFZbbm7miwcfPL++qa48HS4vnt+sT6/Xw/7BixcvUll3vHNCVbdanHOsjJRScGWG54dw/AD1ZhmAIKDMklISZiIqpWippZTIdO93hzeefOz5y5fICdDb43E+XMYyYAcTXF1bHhxJUkIyoEKNCI784ME/9Tt+x1e/9Ivf+eY33DVlNrBC2F3VjToSbGMSRaZaAN+U/CGicxH+D9vTHB5GkzFLSolIiWaFE9WWJ3EndmKPokTiKI0xtIbm1ALtW5BAi5e3AHzrA4m4PtydrMUXRipg
/DsCPDHPSiekIvf+5zBXYJ87iQiacTnC+dT5i8O6jRtZvI5P3SoxE8L5pAwkeCbncGDMnezenFTJow0Mu3BL5aW6xCCjcpylbQuhqda6rliKVbRkABGBhSDuVN20tkIuANWgYT26M1Mk5x3mDobYppjq+x+vscI7rCLGOZ4Ib/ALpx/2LMKW3TEenzeW2caAkogjN9IYKafTSXAuYYcMHaPiDXAfw3rmAXEkO6vebg4hUbPlt/K99y9pXmjRuq5rrZWsc8OIDEKUoYq0EXs1h5CIZknMSIkD4ekEkEmiaU6qCrKqplY2PlUsdQMSEdiJBSnxw4f3SynH47quazxAsAGt3YtaBYGFB9w6pSRCIhScYegkYQFObzO4cYYj/OxGQa/lKXAW1M5fHb2M8O4G7A+iOZwig7eNT2w6yszaZb6I5Bwg8IY9H3odAFFiZs7pFAqy03rbqLa7Sek7/8bRulaEeXziHKZx+77NirtTElJtIlcaNpc0uuOO0iwg/Cf3EY0SkIyfAI0lNXKOTde6twTbXbpRosjpnIZtnfYALDTAPv3bAKAG4TAPt2n9vop4e6r+KxoOACEoqygicUEI1T46CXnv00YD2EkbzxDNV+dgXyIyRIQaguYNjlHbZkj9BbVEYkeC/Hb14K/nuDtL3feLwwGLOmB/DcHM9qvWCOLidfiErzv/b60jAsPkLeiAZuVSni8k75IeKF2XaKkGglXMREZsQk7mcDW1G6rGLpgewfdwAu0i/G3mVA+EC+ILOEAVvIAKkQB71wP5JSAkWtbgSa6g5GalLJBUyw3T5FQdTFwc1ax6JAOrCQMpIwlBnbgl4ZlsbOoNsTN6Aw5q+3o7A4NM5x/PwY3NI/zA5u8BFYguGOEcqrtGA1yHupq7cuRyvbpVdogwKEE1OBaIwFkiejaUhfUyjaanOhxGtbb302RG5s3OZKZRyCCpuyQNSWRQwIm5BXDvOH4n+zYMsw0GZET6hurZOqg93OlAZA3aqba/HbbHsK+aBxSGfs45pSlaFKPXQbo7weZ5zjnP0xTWT3DQqxYEeiegOO7TlMhhhrh59ahpoe5tISVhnmuttUZL4tYaPqaVKKZVkXiaJmKvbok4T5OoppQ4T8uysOQ87d774MPj7To/2N+/f58vcTgc8jybopZKRNobN0tO/WmpiKhTLXUCU0riMDMCKHFiwaYZdA8wdjPOfMoZzWgTJgo6h9xrzBjEowADfjweAeym2eCqqr2UxTsaSkQCnMbBXiqp4bWjaHdKiTOzrbq+8eAe0crJP/eZT13OJHZ86803DjcvfvlXvvr40ZMPnx0vDvfu33vwzbffiyUn0zxNuw8+fL6u1cEvr68huRxvfDlGJm0p5Wa5ISJORNTkccB/4K0xGtNZdRARmWotZZommAc+Lec8pywiqsopwV3SvCMHfM5zW38ASQd29ywLAwqRaQ6D47Of/1ya8sXlYV1uqkb/K1OtRJKnREc/lqOkFG3BBz+emVWzyE6rqpbCvXpQRHbz5O7rWmutWZKralUiOi43dakQZoGVWk37I1azaJHUzKCOvivMTA5X45zGnCQhB1BOuzcTgWxK+bguLSRjblZBnFMO2cnMolECGHkVHvtT1bbOZykFjUziTL8yc+7W9hAfqhoQZjV1q2Qe7CBENuXEBHYnhHOtDVaqVcwu7l0w84sXL9bjzX6/F2IBsbBIWtf1+vqKmdv77Ilp3qVdxu2trmuBI7rYMINERJIZ1qKlqJlCWJy2bKjDqHZXBIJigMjdk8hWwI3gjPRaqS320jvC8+Sw2QmISw2bQAhruD/NeF+IA8IwBGv/+Sl/yMwjDTtmfvif7t4YIGtNIvM0LcsypWkImfADo8PP8Xgc9xLN6G3TxYQ6fZFsSgi6yG0aKM7mtTWaG9I/xtOglSN7wm1FMfNwDq1FfGBmt7e3Dx48uPngRojvX9774MOn4RNO00RKpipC9+/fS4xpSo8e3n/8+PHt1fUvfenL61oPh8toMLjbHdRWEAKVuhknq2rUKgOIsaXERAhFmCgDneSGEzMfj8ebm5tIBorI8XgkmeikU4KBjSTPRHK8XXa7nUjaLpWYxuPxmLNNU4qNsywLgN1ud3OtLBy0O8fjkWg2M7dUa72+vn78+PF+v+/nMZbmDbbTRmLOmYhsA4oZjwk9xLB1BeNPEW+NK1S1qrtzEhZBI4lhEMEsCkMBQq9FhDuqgoiI0ySuinC2R+6XCESSEkUu0R1mtpa2zumEGoX2QmAzjzXv1KJCYx/lHP0r0Zd3XCUljrF5B2C35wEkT5QTgABWcRIQk3egoHNPWgKQkJ9M0lLC3RWI6HAXnhwJwIiFm5s7HM4NUzpopbph2hpgazdVGaQOOrVGBLpLyRvsYsTguX9q8KiRCcc1Iuv47eP7HA1gsgnQMHPkhyJ8GtlCgnuD7TFgBAGow5eHlPZekBZnBj4iAPdP5uE9sMvETq5+9im1GxY1vvfgrdv1Q1W+vP9IRFAWCCET80Tw9XZRW4WdxcxuasVEF5j3IIEWJGMqTNV9P8/3Mc1wqF5zWlhUraRil/feoPQedJnmgxa6On64n57Lxf3LomvRpSzXNy/NU5qckNNEMs3ABXRRdRIGBE5q6uwgpN1Ob5el1klmDYoJJgNN04y1Al5rXdc6XSQIw1E0+NFaKQjsRE/+D/UR3gkmB+4P5EBYdAFdXgFzP2pdQFUCmx48/6RQVVu9qsIJxnCG11oUEFD0wwlwRK01ndDmp3g0UbMruJO3DYV+x2eLgC2aYdPnJxguGhaPQ9CNCHUDsxCZnjhFB0/4ua1yipgPa2dLQ6qq8zxtzYZxRLwYQJ4k51z7kTbmyBlaLEKGKSXmdhlmlnSKBTAPUF6TyN2NNmYOTxDw4PNgboFbs9rsHnZvaFWEsI4JXddFEiVOzAxmr1SqqdWc826f5nkveb6+XZjTNO0AkPNutxORFZ13wbRWjSdnHfuqTryZF+0rK3JBcGdKLe/iAS8ZjQ3ITD0yQgwzY1gKa16rQwzEwsPzHawPtS+FSJ5MPQNA3dUMoBXxRCQMZpqYJLHkLFNiQK+XF4/v52rX3377K1/41Fv7i+kbX/+lq/nyYp72OV/Rcu+wO8zpePvy+dOnNKWUmKeky6JFzVCiR8a6qlZK4sPpZWYmVS0twhG+Gnl7LnMPvQcusE3CcnsEME2Tu+tabmmZ5+V4XOu6Yl0x7VhY3dWUOUlvtNnL6ZpGoPZGUl/89ua9D57G0rp3/+LFi2KGxPCWcyBPLIldgpbThdlBJEKRw4M6NHBXDfJAHmZY5CQZY3OaOwkxC0hEiE0aoDSYFceeinCMdBg4c8vFSW8sQUTmDnMmF4Y7ZEsM5V1lNvsxcmittZe7n4rczLVxXjUCiaFrmU8eQktm9tCDtM6/Z/s0okDMLSmL1pCItBYipoxMLERqHmZoTgYBWyGniZ0F4nWSzBFuV0VdSFeGiGeRlKgzGpvOhJybxGBG9O1hQiUII4urI6xYIlJQRJWIiCRYaaMy0RSnmtrMJ4fQ3RsCpTl+TfoPBeN+Jtf
Y75py59FrhHjlTr2BTagrpTR+vj3n9tdbM4XPGfzHgyhatv5qYCy3KsQb8q7tI+Fmh5uiuik5Nr4iMWcSZhGmZrbKKSl6eujutVbqDSE3EAfyziUTiyc23PASY0Lmed5NczRrLaXM87yf711e7Oc5TYnf/NiTz3zm0w/v3X/69MPvfe97T5+9AMxc4/R2Xm3ebXEBMM9zrXVZbtEbzJZSiJJ3RDEnYTZ3L0UPu9xCqFG2rAq4kYXmi6R7zHeT6qp3q78ayoPMrFbrWjm5UykKRB9TF5Gc5tjHOWegjshuo1pl7s/n7GhuYQWo627qgNFGoeTNXQwPkAhEbopelYCOKbCRxkeLwPVun1Ej3XEEG7oXZgbZSADy4ECK1O6Ge6AJmx7AHtbJWKjjX/QmE+7ux2N8bWBHm2Zc13HF1ma3k9Ezc5yhfV8tqgr7hXwYhU61/yrIRDtZLlrNKMO15w87ZWhHkEa7sOh73goOLU4anxJFdeh4f6SYWoMKAnX4KAN6njMM+9WIxg8jF/oPy4h9ZVn9ljwIJ3hwP4x6HcI2dklns43zJCE658f2C7+Vju1uiq3UwdBRehRAWQKSpEvs7uf5kd0+Mzw3J7GCSjABE80smuq6GIxdhUn1Busz4B0kRboHQFBFyrx7YGVle4rdG2leFrsuei20gp05gQ+gPbFQMk9Hk2u5vYfdvUdP+OX1y+NS1/WY58u8n2ErwGBxFzJvVDEEmWd4NVgERnXUlkcbGWYkQRb4KVYbflMIgh86YPQHno42rZ4BOAzmDiUyRwUpeXFU+GJeYAVkaARGClWwE5Tdo4ecmzpgbqGJeqBa/cTscNdCwHlkFtvyDaJSStXW5o2Zt20UWlCrJ+vMzOFm2jgZeoLBNix3d/J43R09MZK0SXAvpQybhMbFmgreqICNDzlcyvEmgNGlmrbdMKgTLaSUmCm4AvJ0ClR3QwRbbRFk9s2dYEIwH6YoPgnrRJijrCwsBiOCSBSrFMCDrWRE8ZkZws7kJOqUUs7zfnbKeY4UfEppR2nOU7Q1FyJ1T2aJ6+2tuKmqiWSS7M00FR5G+WY6mFPL2LEwaqNipyRCupYgiKOUiTw6heUs0bUZQOKgJ7HgzchZag2zyTr9nbtblGsywBwZxbb4ap2JxIkF4kBy5wxJYDKjBeIFuF5ulrormp+9+O4z3Kuro9yQrV6PL5+9d/PsfZTbqtA5WU1eKhtcjarBySUZgawCQcUeWcm2ymsNqFWCwyxStWKNSUg0aa3V1dxtmqbGKqdWDCKVmadp+vznP49pRl0gOyEOxNSZN4iuFBwgFGACmLNBLi8vP/vZz37wznfNKbF40hpdT6KWUlREanAej4KZPpV6MkTC0miXMlUmSsGOKNDagF8iwsoc3BhAYlZSop59JI/FjLBsiHut6amQz9WsKm3K6YkIHca03XXUbUjZeHehCRnk1PraE21DrifzrklelsHa2G9d4R6WvXUS5ODnpaABJGMnwARczcAeze6SkNSkXtx94sTMEa7IJAYtywrzeZ6HQIn8UgymdyOoqGViSjl5byfgcEKFGSEq87yiFkzCAuHkvprC3BnkAcenaGAgYEtdrVYd87MVc1vTf3vEd/h1imjL1DKEWOKtNOxWOIF75ieiCxsdc2LP2j7o4ZUN0RkDtnrW69zMCGdfO42ZWUQULX9qMKoAGh1LS7XlDEQiic0seFq1c8BsFwmllmxxQlQ+wh0ODpoYLe7srg0JbRVIzBChnNNuN+33M2CllHmas0S6XXa73b2L+fHjR0+ePLl/eVjXlZnRmq+0/dfroq3f1mm25zmX0hRKC8Op5hSmo8VowVRNl2W5d7EjySKiZqE+QkGFtzas/AgpdD/HtmFRtKCJuEcYrgk3d69VmXZV1/D6UkoABRQiZeHeajX8Ma9VVTlJgwlsFg/R2WIYS46o9zUNg8A9eIDwiqEQZ9ANcphoY1t7s0Viygj9ceH0BYCEuDFxE2xdY43G5WJxI0o8iADXXr8b8qFUHffS85snflp359YBou8vPfXqbLyi3FhwALieNRxz91PHFPeoNQR64Ks1/jGP0E7zB1spCsOcA5bW/ECCG3zTH8I7m2hMOvpHipYVDNujuRYdshhBodYCwE/Zqq370dxCAIO54aMEzn/Gj9OkvAbqv3UFz9zCOLx1lRwuIjYVhiOF+1usnjOsDe/sStFFrHnFzSFMEa6g6WHaP6jXl8bJWMBmttCaKAtE0kzVk2l0YnOoVX8mSpQXpMfgHVCoXJtZSivyFVJyXBNpYhYYvIIJNIEPJC5T8ewMA3bYCUniY1G/VUqSJkwHOypByNiRqocDWFhXSQIzN0fsPiJiFkjVoOCrUTeM1k1BiQRNpW6DHf9IffshcNDEiMMK3JwKuzqK28rsAvXGVhU9cSq5+bq08KK5u5K7usJ8ztmtkVy7G/e+yiOEdu6DnZzD7llRc5qE+NTP3IffpUEI0N+3fsTsuZ++6YMRtIumcdc9yAsM7ds+bbQv2GCsmLkLXRoac2uTjFNt93VwecvmAyYCcyuG6bdu7uem7RmrgcNJhISFFZECcHdAMcxjNE/S0YBVOYtZOGPoMCFj5pwn6Z15zUxrQEgi6MuBjQQovJeUEqxqASubmasSgdyIKMtJ0DNxQ8EQezBHOJxan7dtloCIhDO8MiMGWYrWyqqamISlcgVkSjkmQRr53oj+6tCmQpGjaiCocDC4h8ioZydWmzPnCKFT9BbP6mr5QDnLUW8OkHzYP716/+rFwslvnr189uHLF8+vtfK7b397qQJdP/b48tnVtcAImqBMxAEThNOUW1aELFgwAso8zzNTIpTSwEVEABM5uOVhzLTU1VtIRkSIWtgDkXjhRCSPHz+KhyUtQvY60UBt0QKto5GDZJre+sxnfvZnf/ZLf/8Xnn14xQIGsZCZgb1lB9AdH2ICqZmqRSFhAgThu0bivtmsZtW91TVR9Fon4qg57LSigcqLmpxRjBcBjlgnwqfc8pn1b5YE4tFkJDD3DFhAr4UpmBKjG/PYxm0A/fVog3naipteB8QNF5pylw5Va63cEuwW/CIMhB+S82RazbrLjHBxLFLWiTgJZRZPILirieSU0rIsZsYpodTb29vjcQU3twAsxaFqszfSS6jVWuuqKfE0ZSIvIWHh5nA3psZJ6W5whYPNzYnNq9Va1IglpQCPORPIGeK9SmrMBp/THY8tudUErtbzzCfp5pFifcWiGwa9bShqRER6zvAUq/POL9ma0La8LnUPH+7U6pyDm5UBUGjS86dJwJ2VA7TybmZey2oWPRA3EKwAo254mz28f3d06k7aHCwgyhuhDbPab5kN0TUH7RIOmE/RoZAlsUwpTykXXoutKXDw5EQ0Z9ntdvM8p8QAiF21uqvgFJiMKB71cFifIvOOeaZNswcWMAdNWWNtcSezRlTr7tWsaIVzMc8JRLSuaymFIOHp9Tk4bRnuCfy2QswDa98m3BlObhBhaHj94o5SyrIs6yrNiyMfOzpekERlGm1nNeZz++edT8efWhuxjUz5pID74knCW2
bU7QrFcJaApr2ZBbBSiKx92t2/EadH9yrbQt1sBwphhGav+eZo1zR39xMPrTtiG8PdfZLc20C1S5M5JITHKf1IqcvYVm8T4z9NCLXVNzrlcN+PAzUtFMRUcHi4B1GebN1mCMSF9ZmijcvBARLzFt/buCK98CnwitS8Zn71ay08hPFcTnP4fY/fenmtH9ZBG59wrLpXDjv3+mKGt35+E4m/jqn+J+7YZpKb4hA4AW4jUxoStyBP2NH0gOZ7PO1pShAC2XJbd16QBYnSnKxMXqiWAjetN64memS+xu5QzdZlvXn57psff4x8Bffb5SXN0xRZHFvdnWwG9kQukjwdCAfk+3DVcnO7OtJuf/GQp0uYMO8h3J6GVDcyNYOKWex8hHGYU6ZMIrKiUVp5YItqdBEHBbe6NJmEYBD8IWYKX61E/Yhn0a5YCQo28gIoaIUVRgmdTWywCnMEblBVbA3RD7TWWQ6nCKN2ScveoxV2inadX/oUKBw6uZsr2EJ2wot2d2Y2w4CKuLf/GgHpxtnT3slwkAvapp/wcBq3HmnI8/HR5tK4s8mGfbW1nYYJZGZp5OLCGwRO7bnRqBdAPVYa+qDfuVNvih3fFOZW+eMFirBra+/uLYmq1m4YDQyMjemIdijsxm4aQV/FsizrWplZVXOeSqmllFrMDFaMMs0pRXSWyFiougEkvUlAkCUJSFsWh9zczHojDZi1APWYmqaNGscmT5JUSlRLMZO4MFHKfHNzk4hdhJlST5YCspTVu7vPSQAER86YqG3pibvnfE8YGUxaxVTYCAYHkagWFTNJt6XU5Wqiev9wIJmeX9/crKXW9MGz93b7B2987K0XN7eLWp6niPEwyJmC4rP2dvQAGo0KKIaXMwNA0VLUzdiJSQwtxBvVqNabnFxdXSVmqBNRzpNILqVcX1//2je/8eM/fSEphatbbWVOkVYaWUEAjXK264q6llQr5+nJo8f7/Xx9JVap+kjyqLtUD3RxNMNM1czUaq1gOm04Gr2wTjuhmacu3lLZJJJ7xi/2egvYM5CFEgPSqrBUS6QNmBqGWURpAxmNejCKZLGRmLcWFA1wrCfJYg5z6s0A1awXizaZMpbcidOIeLBWWufxDw8wsXSTzNwdncFjSvlYVdcVIjkxg9zI3UQa2fHQvswxqBhvglDKk1SvuK1FcbPs9zxnqZyKk5tP5gQi5VLL8bZULUQJgDD7YJzSLlxIyNwqz5zibybwlLjyQqWqmyqInAgGhZs3AzqwhUM2vfb1drpe0Qsn6bZ1G8aLEUjzAXlwsLRyu2FDj0eACFOFdditanS/nYiCTyiwcYwTU2iXimfNeLrYVOsFAAHcJ2uBKh4JXhHvRCwYcQTmnKR5710mExHLlrw0ZF3ziKTD75kZ3OT2CXBxjiokIquaL/aHw26/30cbw5zlsJsuLw83N4coMGBO6hR8m1Urn/oTcsQpvCeuW5l0IvNKoHme3VlcmITIDV7UDEkku7fGhqUUJLbgOtsY28zs3uqcx2ibntqUkroTx+bjHMKVWg/S+AKFuWMGVS+lHo/H0EctdEMt/rLxBM9UaBcpwElBtOGNJ4VNwAhqJNKgnh38cwKCb1ZmvPRQ0gPnSQ16amakStY73fdbJmbQmZ8XJ6yl8IZwaCBIt+On7lFHGNPvhr0BAFMfQwR8hi/aw9ptZZqHkhyqjSiq0tWiDHBc97RKmcjtxNRlANjFqGmHnjUKK20she6wAWgNpOLN5oJ6V1j9a6UnnTpGlCi66wKMNl1nma/t4/Zzo/91x392HUK0JUR3cKA91NuYVc7hoL3JCUINobedtN6J6B/h6P8hHDYiGb1RS5A+qWOiGfky7e/L7pIyIO5QXWxBzX7kPKVpcs7FjnWBWGE34MpwY7fPfb2oSGq4vn6xHN+f8yOYuQnpQ09qyAwGzY5LMhActMLVoOw76HEpGXLY57y/eIS099Uo70EMBjsyFxOBTvAEY7C5CzhJnqZpImQoT5ws58gOgRrvYDSnjhsnesV1e11M9jc1oz/4GMB1aj0GK7yCKmwlFPdC1eEKqwgpV9xUTTUlAhBdl7kRqrm717VwAyEE0TehkWii3+/23vyOUB3yX7V4RwO5o7OM+jzPqlorPOJ6RCxwdrUFm5IWdI1DRAPUsBH2HpX/m+xi+8nwJ7cGj49k42Z6Q6R3RGuL044BpHE6b15sW9Yi4mjlkoRWUDH8y+1l3B2u61pySoMKggFmyVm8knmNZHq/0XYhESISVWWhibOZxJMDkFnYUdzNbF1Xd1/XktLClFS1LIu7CxEzT1Oi0sjWQmtFCpBchWCNsqLnap0D8tToJQJQQhz8B2G0RVMtd6ca8NfGMOuu7o0/xt3300xRZsYjZk3dOAuobXMI46N5nmFtZsKsbDOZnYmZPBGxUSJQlLsq3ZZltz8QzR++uH582D96cO94vLa0q7JXkWPBUZdHj9548slPf/CVr2F3YZKXqsXVIYYo2+hdiTmSGzIOhL/mZFZV3Q3EYE7VIuVFIgOzSLlHB6IZ2jQh55mZmdLTp08BgGGmkCQi1lRAr4tzePcGh92R80RugF9dXREw53y8sdYSMDoWkxGRs1Mr0mnpberMe9v900kIThvDNXrR5LGww1+LPAY35Omp/I/NN5T0il77N1b4yLAnFuvGdxjccaSeM0H3weIFd4uzBMHFub3Zb60NOxG7k7rVombVN3u7xsCsMVg2kF8vNVaN1oWpZazCB0Dnm2pVUu7ua/HiCginjJTTjqdVy/X1zaqWFNVX50rJCaszqovZ8bYc18JEO84GNm/2bTxfRaPJajPZ8rDcjpxEpLpd3xyNIonsRnAjp7uSpE/dmfD1j0Bznb25wfGO70d8k3LjZY5fEDsLWCIr02yUli/eoJzvDAAb2Of2KkS0NR+H39IG1X22sXFUNSRVe+4i0peujaQoIZDGAjGz+bA3gq9hhrO7q1WzXkUAHV6NsDAT91DlGKF3GPDAKYxiwpRSQIVrrWZVZJ8l1Vpfvnz5sY99bJ5nj840YJineQenzHlzj6GYrHZmFDNzKPM0ivTM2IxI2PlU3EjCJIHQF85OLMYkmQGe5xlACOroDmgGEfEtacrm0bgRpUArhGYHc0qJavFAvkR6KokkmWJJDhnYJX1IyGHCni023mQIx6X9TqkGM3X1FhYGrKWF+05okSA/yz0SQCIjQniiq6FIArNHTKLbBk5wrbEUaIOZZ4AQVXOxIlsG0UEmMm0XczwAABoVln27jcMG/WwkD8PtMo9qD/QdcrIEepHqdpacjIlxQoduprSRNgwbyNhhMckIhHNskF5I2oGgvcNYP+V5gmrryLkbWue01skTRGaVSJrbgmCfx0k/bvf796/7e11M6s4Z/v//uOtEg09NCJs37qeyw60s3YJIfysdGw/k1cEP64ZOfxAUSfJFOtyfLy6RbhWLkzF2Xr2wipQ0XZLkXLMxQ2+BW/ajWYUms6PTIdF0ee9we3wG/u58uLw4/IjaYnbNNIMPxFGtNMOZrKBOSuIrSdpPe/A0iQhNF/CdUyUDOKD4g
eUDJMMFNZGReI3msyYZYBghWgAQ4AoX16JWWqr/fKmfqCF+qPN85+isFK9+usLUXR2FvUS7ebcVpoiG4daiaaE/hLXJfCN1I/MOE3Xf3ot5BPZHBg89vLvVBZt3bETwY1jbiOH2xTjD5h2gB+/QTQszGzbkeH8olDGknjw4dSkbJDfYRDCHeD/N5wgQd1MhjLq0sWwEqLThVQfOTgFEiP8sHG5m7DxCtifKnc52MM1cK4goCCrDgSaiWleRmajxUIeftq6r95aAKSUw5nkO9h0iTjK1e2Zel7ocy2625fYFgKgBdCZ1U3PFaNNBrlbNnRKTmJtwHgFyQdDJMHMzDVTVpXG4hdY3bR1Xa63SaiPMq15cXJipN2MojANntFi9xyypBd66BQzQYr3bFVbpqMQJzILEJmTcOxqmNFXlF9d18nxM+eU165Lf93JL+cb0FqTTheeLwvONkfJOSVZfV0wAFw8uNk5YghR5PMrmMhkBnXk/OEXACIfslOggASWRIBVclsU0UhxrzmtkPj98/hTCDrtdjvvDwSHOPabb8vHmBAMU5kB1yQ5iICVYee977yzLMrLkY/kKi4iLiDVWW/UokxOxHtLYrvVm6hGzCDOrnnIgA4085hxEAq5UeylQ6zU/GsjQJtdvZmMjuHutq2qJNJG1CHi0O5OOsDjJrLFpB4gOesoWtEXYiW2gVtE6XqgG92lzX4cfEvOSc07csgG3t4uuYd/3vgU9I7RtVNayi263pfpScs5pFqvulNK8x3GtbsdithQzq9GinVjV6Fhvb1erdnGYZZpD5zGRWmT8NK7CJACbGZkbwb2W6K0tnNM0CZtC3YqaxueJCMLcxfO5lHxF6J/mc3w6Xrg7/OxXQ4aOf0duLfMpixIX7cGc3h7Gz6SnexvfSAwyceR+W6FSj8zFIxaRwJ9EK3lvUTY9ieD+KLnDK+JorF2dkic6HKMDg8e9D6cufmVeqbchiU0aXFJtPjeu7CAf2+oDEZmmyayu61pKEqFpmhx6c3OlWuIuzCz8mZSSG1JufTVVi9mZh5xzTrmx1/AgwuGInjszc0o5D6K2NoAMcslWHUDdMMq20b76OKyVg7f43SmASiMxxdH6iLnhThH4/8zE8zyPKQXgaIQ0eZ42C22T4LrrwrUXXTKAiDgSd7ESwo4w8yAv7093jH+7Pk+LuhFwnERHtLNEeKotTk8wo+6JMVrfCN9EfEGtzenwUe11lgEiNbmZ2PaCqGWwx0f9TrkrMj+fB7jfmRbq2hOkzHwq0vOTQ0fdX2tjg4b37u5E1lN4UV7RH0IHxgKg0VgVcp5CPH0p4FI+fEIEw3P4hIGEkvBPXnkiv338xg7+qNQQGXwoO7xCJPNbj1dmsyC7Nd9iFgYyuHSULAAoIDxjf29/uCfZlFZnmm1fUcLCgzt4oinv9lINpCtRERzNiWEEcUoPLh+sVkArKAEPhBL7CyKGHwCBC+gASBY32rvvZbpBMqacMAMAMihznqwucIGyR1Q0gjUEQILrjbVZ79EyrosMQwfaqKpqVfhWTP0j2zNbV/DMxPLirg4lrw4lUvfCbmDALAogw/4VFpBAjwC8U7+xN0KHxBkwihBuL9rfCvk7BsZ2DNYpi4koT9JQdXQqsWfmjVyVRigDAK35t28I1UN/qeogsGA++wI2Js1QZJE2w4aThplF2EzPB99eDMd16IX4N+Fc4d0xtZmZeukVbQzTrQ3n7gTknKlFgExEUk/qnGzZAVJq6Kl2b3GfgRxTVbfRIoyZJKXpcGBmYUpBcbncLFdXV1d0FTbQxU6CwQxEZrZqNStk7mpCZISqphWU2mQNl5UAeHB7uDA4yB262iMiEcosNxoGllp1JxPJ7l6rirCZaa1huItQlhTIwCDwbDqm9wde19XVovqLiKTPj0PD22VIAqXIYxMR0b3Lhy+vnl2X41uPH7/73vXbL995/OjRO2RXN/ritjrtTPjp1RHPrpTnSgZPlaBUQeIeJbJMejNWLfWSDDNjSrXWUqpqBN+DRD7quCDkHB3H+0K6urpy95ymnDNAy7I8+/DFu/t3f+lLv/wH/tAfPjy4L1qrGxEv68pTIurYkBY9azsgU8/ErCvK8b333iuloOF4/bQBmBvkWHsSvLlPcNOtxUmboxGCgmyTQ3d3s9ILZe9GmMyC8WFTHdH3JDNzi8e0VcHMpSzhCQNBJtTSLymF23DXG/QNNDEEzPZN5hYuVVUrcdqTUTt2MvedMsxoYo7ltN6u7p6nBjRVM4KHV0zdF2Vm62RZx7XWWndOEwuKSk4GXs2ZZVWztTjgYJdk4FoNpS5LIUJKU04zkRpchKRLt9i8xCKlUQsKUNy0qLpREpacU3rw4MFay+26LGsp0WZu47NtDeXx71bm3pnVH3gMG2VZlvE0t3a8jHYX574ldfzGdmBjWW5WVEPKjTOPqwQadl3XdV3NLOccdLXMnHNetY5rmRmsq+H+5ngRvp/1oy/XBj3trKSeUpqm6XA4HA6HaZpePn2mqg0Qwj4YVmNsUTk26maZ+Xg8znPe7/a73S6k/eFwePLkCXU/c5qmnOdowaqdMy2GFBKYmc3s5cuX09w7wahGgE9VOZM1kCenNKXJwBSuZkA3u8ek5mtRDxApIRpAUetou0m6hq6+8xTGHBKiJJ6IXCgPKTGevncY9thNofmHwgbC/xo1b3cXIfUoTx+L+yB3if9aGBHo57FNI+OxvNv5a6Xw95iC73TEIgdrQ1uN7oi6337zbK4dtMkdTKtRNwmED0mADTjrnVAxnyZ27IsxY+P24yPvVazSCWbiX7azBOO4R+uB3YDmdh8WRBsfYGgI4IQCHR8G3ehdEq/Y2KOJxV2nQluPO0fwh0d1U3DHt8mHw2gQCNFpx/06ZctvH5vj11vxtT1+i071dv/GO5u7sPaV7R8AwEhz2s2gJCAkJpuz00pHU4S5hTRhntJydGdTZaxMbCjuxW2Fpf1hlmkPY2gGzVHtDBN4BhGwA2cQM2XIBLwHFKA2vDQSkMDEk8PUKYESrLRWyebkVquVomTeDJvYEV2jQQRyilS22yX+4SUF707z6999RQ6P/UvkpNbK5anjKRovS+vBQ4PohZjcQsoF+4s0PjDAzK10y/mkcIfA32rn4bx1DdZI/lh4aO2oehuT1lUuDZprNP/orLVy2Gyq2hmsm/U4ftUt0zN+uyGx4wgEGRHXWsaYx7H9iXf4W7yTXBctljOlTKsWSWqEdS0OZ+aCmkk4ZWVZFUQ5s5pZFklpAgzmQikCw9HvdZomLctajo6cp8NalhDi4Vj1khkb80tgN4vm70nyst6aRZMrWtdVi0U74XU9BkAu7/KBDqsv0zSlvVj1eZpznhSu1TN876Qgf/7itjyvWkQyiauT2qpWrVhglRAZHiIjKm4JPiV58vDy6tlTUNX1OE/58v4lET9+8vji4mLVmnP+3rvv/NRP/VTKeZ7nMPj2u11wflweLq6vry8uLogobA7qJWdRBXQ8HuMnwXAAIKW0Hq/XdY2PilpfN7neyG1x9gfruv/qt0rO837/8GrJ
9z7++Prlt26ME+er4wvONx+8+109LrKs87zfE90UFQGEb9cyT/NCB1WFoJTCIvN+b2YkUlXTvCu6lPU2Z0kpr+u6lDKDp5QUsOqSMzGbskvK6YDo4FcrETnqUuyd9+ruG29+72tf+dGf+ZmdFlwdcbjMUOgtOIJcyYy0GHM203kSsQVaQACvv/orv/DO03dulptSV2cGsoDh6sbkwu5EmXzJOQdmzMzEiA0lkFRwEGpZi5kTSLDoMiGBOeWw3Q1MktOyLK6RNGYicrO6KtQnyrUaol+1OyuJIzxhYU7EQC0RYklBRaLVPM87p7WaMjGJNE7ZWoygMG31e6ReDUlMqupaqodkIXKCE2mDVg0+BsVEQUPbpI6GWYUkBBErFUBka0VYVUtZzexodb+bKKVqtapqWdgty84cMC/kuVd4WXWALy9kXf3m+PLq9kXKOc2zuad5coKWouvKOXFmdz0eb5hZEk8Xs8CRBEw5zQQj1eUYqX4noqJ1mqYkFX5cMOec57RnrcfjcS16vLni22OepzTNu/2B0krHsmprEhMVoXLedfBk4PaDejSqeo3txpy8RlYt11oH54SE0+eeJOVJDnJRVq21wlwoemsH+yibuZoGZuQk2SVmDhTURBTZBYJDzQgskuEOFmJO05xzy/2qVg2QOZ8omkMeQjg8kGJWe9VlYkG1UircRSjnPEWv12pEbKVaqUmEWNUWFstZluV4XI7hqiU3SZxFpsz3L9KTxxcPHjyY5/yLH757u14fDgemXKySZK0V7AY9rrfmVVimRDYnK8f9RMYurLuJEilrmQiieshpIpApw9UKIfNuUvHqfploXVeGy5yISBIBXst6uNiVUqB2mHcTCVWbUmJiK5x5nqY0J6vLWmA+T0sppVYSLl6d+Hi8kTQT88zsWmE6zZOZVrWUploqjDIxgZWFnE/euDPLwuT76R7r3uo8yc7qsdSyHI2Z11sXu4TW9XYlGCc1W4FKpKqL6pLSJMKSJgeC/9o6A0rLQOYZrZa+a+LehImIglRZ3VUrNDwoFREIj7gDwjgJNIJ6+1WguNVIJnd388Z/HDzEBKYIyHW1XQqROwH1lC4O6L9H6zc6dcQiYicOmCm5CRBeXHM2zRXGHI1moKYiIinBvWjlVlzQWmPFvpOUwdTS5aY0MKnukoPEKoobqUlXVRFxtVpPVotZMbjwnoJY6ywKg978iEZXegKYyKCvOA9nOT2CATwSCNLSjqczNsiqM8BwMWKCjEADm3b/NhxLbt49JEjDxhUjLud+Chycm6cnSXX+ScVrjo9wh5ytM/z9Oo/faIptdBB/dQR3TuUOkFMbv5w+96jZV1CBLcDSJiZaElLtoEoCoGSIsvaWlo6irPG83HALAo2OIO1FnMHlDn77N3PHv7HDTs/r7EIEJxp4UQrz2N0zoqRSAAEyGc+EmYpiVaDSBPsR2t1j+47rV6x8L+G5A2ICSr4o9CUy41Ar3Zbi9XiBchDNSXOiA8vuRvZUd+AHkirye8B9x6X6ZAhw7hSLldmYdoQd7F2AgD2cgRV0A3oOqgCBb6keqZaOcFkh4qkk92SExaTOylSUSeajm5EVCxlBqD5VXj3V5CCDWVakhEIDfh1dtoJOvFWHejqbz+2ERqaV/O5+ueP2jZphougsr4BRS8kq4IYXBCDBXcncVeFGRstxFYBHbacrEcGB2kuyHXBt+7YtxijsJ+5lZQCKlsgYRq+BMTDZ9E8mIubcpJwGcSyYJYk4N0sg2uSOwGXQtqmq8IyGcmKm5N4yuJlzVDe22B8ogXLKNcRvyMyRDSTa7aY+kqRupZS6FhHhXkuPJppkdMKoura5ZXdUEFKmE6NDex4h80fjQWlAOAYZWipz5NnCIWx2XctIBNxOgsu7lELnTbTGDGIj00eUHcA0TVEcFaKBA8UnEp6V96yoiMzzLCI6GjS3cGwQ2DUIDaN1ZyZnNGLzHtO1Sq0JpExMOTG5mvvnPve5H/3MJz/zyU98/tOffPz48cc/+cWb4+0bb7xx+elP+bNn/9e/9Jd+9md/9tP/zO9vEQhmmOOdd+COt96K8DCYoYpawYyUYIZ1RUqoFbVCtf0HgLlcPY2g+LIsy7KsxyVoD25ubm5ubq6urspxCQ/zxYsXz58///q733nrE59cv/6NZVkPl5drqR88/d6nPvWp3aM3vvnNb5aijx8/JpK1lt2Br6+vPdPhcJim9Pz5czefJC11ubp6sdvtmPNuSrBZq+m6sNNhN9miUW4XmLfME3UcEffSERGR1Gym73zjG3/9r/21Nx4/Wtf17//if/K7f8/vefSpT8GqHauToLDMu3ni65dXu8MFO9aXz6d7F3D/4Fvf/qVf/AfvvPtdHU20zwtTR6ohHqX2QlARsWgMCgzcU6t16cc4yUj4oMVaWl6OextM01OYH6Mkl1Z2TnYCVCca0MK2dCOsQn1IISCYOXy8sdQjXzTY9hzk3rpI44y0jTf8ewSATh5RG+qABbrbneYTFLAIPQXSzKyn9t0iDaVKRPP+IJINuF3WUkoxI2Y7J0cZLUmZeZr2DBI4QKWoLsdaFq/VVBmecw4+XjBFNgl+6o4qIlSt1lK1zG7JPM8Tg+Z5njGvWkspS1mH3TlCX35esnV6mpuJ9U24Dtb4RwTtCRNRypxzFmJMLdcxUnnUEBenfg/o+RCRHjzrYMV4JhHf2S6t+De803gzBCYJAwi8aJtP7U+/S/hxRZGodO4kzG3+T7DPkZaMhHm/lonINOXdlC8OuzcePfz4W28+ePBAhIi+Kr0klSyseY6QyjbWGF1oVStSw9ASgZO4+7IsL1++fPz48ZgTU3XWlOd5zlZux6bgjnFV1WVZQmJYI4vOwgwgp+y2bh+iu7Mg5V7I55wSiNk8XC0dc6uqRGrm0cwwHvmIGoxzmpnqqbFvC9TKaUUR3V1LAAI96O4Am6lscKThmLTbjNhDSuP8IzYxRMFYjUTUIJDYLM5mWxMznwCWHdB7Z7X3z2BmoFOvESLyFjE+CbQuOogIJJKYvdHTcfDiA4jvY+BC3d2doiB+k3flDnk4yZ/Nar9zxCKO9dw56GkzfiRm7QW0tAE7DFD9mN7tOe9srniJV+RA8N6NTKMHhcfpETSHKj534AyaSMbBSutoT7/1fmFvycZIVHJPRMZZNbCp7sOqfSVfcXrnN5v78n+4rs72+HUNMTjJAJx899Mv3c1Q3SrIg6OcEDEH802VIPvJBd2c2TZNIGkzHN64Yf+40KQ/8LqvfmHrzTajZPg/Eb4DZ+I90YxFiAqROimsmu9Yd6A5ZXYlowquDGXOBAUp00IAYYZfg6aWTcEsMlvjAl7dV8dqXt1rshtYUV2MKkslMscKLyzmuropmVlVrxr1NbQL7LnBHFbbLmJPJNbZ9mAGrUWrWmPoQIfO0kdl9E6b7rXzGUHWjxIwp6MjwxFiyVsA6HSV09LpjSLCjss5SwwjWEWGODqdupUOAhg225Dm6BJpO0TaGGx2InU7g0hsddNG6Z8yittvekf6jO+Mqwx7o8nPTfeLrd4BENC54RsCUDt
hjkDDoOJtkF1EAGlt9BqIAxh9CM+eQeiMzWi6ihVmLmcq8DSFKSWLZiYd1GREBqQ+1lcU52vmy8zCAYgriwRlYtNe7j7P8zzPamWtOdzu3OBPBCIwyNncvNERuLs3/KCQcNaUDrLbWjNZ0pxyEnr86OHVhx9+/OOf+Jf/5J/4Z3/un7731pu42IMZ2KPW2BI+HT7+6c9fV//q3/r/3N7e3tzcrOv64sWLr375K8fj8c0331zX9erqijaMrCklM1vX9d69e3FrAfHa7XbTNBHR48eX8bX9NB8Oh4vHD57sdiJyOByYGCLIGfOMnMOx/F/9G//jv/23//bv/2d//k//qT/zv/sLf+HHfuzH3nrzR7759a//3b/7dz/12R+9urp69uzZ++9/sKxFRJ5fX99/ePnBBx+Eoz7Ps9ZVGI8fPbi6ulmXW7cg/wsmWBBRLbXqmnMmkXmeLi/vk+D6+jrSHaptmcaSBcDL9Ttvf/P/8O/82ymlb377W7/6K7/0X/iX/sVPf/4LfNjDGUTwUq9uv/7lr37xi1+c792fLi9Qytd++Rf/n3/jr/8n/+DvvXj6gat6PUVfiDbtizf7yjbA7rGExk+YyOhsjZkZsYdgDnuOO4Y5bNbu3JIIpcRErWXZyTQho56uG8Jiu9s9GuJJc8naPjnxmLu7R+M+J6iquRvBhwZ0D58wpGq/r9cgatw9z5P0iEytJVLNvDm8exFGDqeinuDd6AH7SXCklA6HgxPfHo8KEEkSjsSmuxOo2qk2uqwKMidOqkdVK6utC6xeXhwYHkhvA3W6HF7XNaYiTTmltCPRo1cttVZ1VNOUkqRJRCZhZjacsJGnCrpXxMUdC7IZ5X7XbCUKdlwnIiEewGwAbiNINmT9mdzrAq9fuOM3eNNlCEDYhf4Kpn84CeMFtZ7pFZtegsSJiAaSUxIxNiVt7QGezjAc/pwzgHjuRJ5Tnud5N0/zPB8Oh3v37h0OO5jtdrtSSkqJwLCGwFQ9FRIE75lZcDghNf6rhtDmTnN6czyOskN3JzXy1hPFe3gCwjlnapw0NuiUc87zNMXt5yy1MrEKQzhgGSYj7mvO5MxMnGL9bpCNfd8N58RZzwGE3AGZtTaHcKitaIQi/XbGqeKpEo2VRvEkg/eqD+AE827JQGbe1GFuH/F2k/aXfOd999HBz+2cAGnEAtri7GxjzIyNH0sUCTPf/njMRDQMBBBIh66cw4baGmwbydm+75G7Oa0Qi+Ti+d7wLbbzNP8AdEOus7UcRgvW8UTasGI4m4fbzrZxNsap3J3k3KHw4asPMXvmlp+CPj3NaIC7NrRplH4bO1uvYOy1PQ4jGilBIumGL8PZT4nK15hMZwP8fuwjP/j4CPP6h1as9VHn2Zjthrv+LftYEL55ds0mNm/Wf8RBGDBvfzZVR+zdQ2j+9sYnpLvXb6iMf4LqCxnotLevnT82sPRh91+0YkICQ2bgAnpBuMTRCGBeKh3NmVQJE2EPvkhMlqrTFbzAV6IEUuGVeCV3eIUX0C3hAMqOiQCHMdSpuC+uC2BmT2uttS7ELjBmcxQzpaoEIwc5t2IFc6mWdt4KPb2qmxMCdclJwNx5AV1HALpHstCnw4FXpMNrgMR8/vK1u8P9NRPc8oEBIO+8g4ABuok2RL2AjQZep+ieNe8u9K+ob9/fXNqHjdflUgsin4mpbbLhdQUFfiahmzob4XVshGSEZYeiGddtwnujy7Yj3JoKY0hbdbP1VN1dbUzRyZlEx4C08sVWSm0epDLbuw1JOmyRzCfocFOEwgNfO74e4sLdVy211lpX8lbH/+ocxZ/D+Bsz3v41UkRriqb9VLW6R4ZQEhH7uq6hpyODNHQA+jozqzWqRNglZeYkuQ3m0PsvJZYslFLKIknEa3n8+PGP/ugXv/DjP6HI3/nat59+8N7Tp0/ff377zjvvfO+dd66vrzmnd999l5Pc3NyIyLqu+/1+nucXz55fXl4Wmdz93r374wGHMWdmqPXWjBW6rre3zyN7UGtd15UxovWKjfq8vb65vLx88OBBXOJwODx48OBwODx7uXztG9+dDw+/9/6HX/jJn/kDf+APPvqJn/jpr3zF8/wv/9k/i8v7+s47L1++fPvttz/88MP333//ww++96u/+qvvvvvuy5fP12W5urq6vn652+0udvvb20VVU5qmKXueIkv55MmTYHlxs2VZcr69uLi4vLw0C8Kh3PLDVdVd3Zd3vvfVL/3Sxz/+pojk3fz+d7/1b/4bf+7nf/7nf+wnfirlqdb63XfeffrBs6987dd++qd/+tGjR48ePfjmr339y7/yy+++812r68Vuvrleb8vxbEfFEkySPK1LYyEaTdu2ZsfYsdQj6GYKeKC/QNawnmDbGC4UJH5KCIM4CLXcRYKBr8UPWtkPmbnDTEjIxM8DQhuPsT8+Dnar5r00g49QaxWzZsC8LmWxdYC9o0mJKMzReZ69aqwT7XkYEXGzwCiGy6zq4Di5G7WESfD6xl5TQ+MOIQbxUouDAxzFbRcrdzwKeTBxgdhKdTWDVmbKknf7fWRHa13NrIY5T/FONfgEn3eHnHkODL+7utZF13WVvOY0B3xzv9+XUtZ1jV0w9vI2XbyVNtxZIidJ2q00DjYnB3GTcYEDCO/Xeus5Zia00q9+RALqTOwCIMhICcfbOItBGE4C+qxNiLu7Wvj/8dWRWTpPYjdAYHhZAISYaWNNnisD6g5hzrnWSuQpy5ynKOFrsjpn7hgNia5YZpEc3tYijogDJ2HjaZqEfDSDJfZ5nvcXFwFib0JMBPFErO6TIHXCz+YonUR6SilKqcdzFCE4CXEiZY70jZtVVwtd4cQwjmie4qTwMDKuQOhmjNhQp/z1eK4jW9WfoFkjtcYr/bJH5KLxi8Tm9JYBdnezU1LL3athq8upOzYnx2NT70pEZqdHtn18Yz1Hyz+8osjjLu8YRNvZaHkaM9w1RBRgPS6DCBTtrhhE1mtWe5KwfSFR4zci8mEzcCMzcHZCJw/oF1H0qD716kQAVhduMu8k05iZUtrG4MfuaAlLb0O8c/e0cUTb9xsh0wBS2oY9ptWfxLepuTHW/ZA++aD2/sknrGQcMSKnBd4wihGIMTDABHFygCnaknlj07HNcqIzHwB3vJofwFb66hHNG/z1kNGPzqX8Rqv4XvWz7BzL5+O0tE0K+ukbIHSGXiUozKLPNNACDdGSPpwFdrZROLppLNnPOxzsJr3Oh0q/qXv8T3N8lB9692m2Fq9tvHL6FqE7hEYI5PYE2oMuYAfQLYhIXGhdTWFMqMkn5AMxp2wmrvpStYoUmiRzAVVEJ3pfgJfuO4eocZM9pAQFFqC6Va3X7pWows2NHAYyJgRfBoOZE0tSpaqm5r4WGJkrwcFuAgI7mZNVsxopQTgRGQFMEn1KXzMfr07XKTP/Wqzv9km/zhxC17PNCSQvZhVxv2Sh3kg8AovmHnnOEMemVUBEjkCPBKTR3HW0HGvmRBvK69jOX39jPR4aAmqb7sLGH/POujK8QdpEuqnLcNUTJP6kX4BIwIz3fYC20imAeH
c+N87eeMfqGe5sO04ADU7ZI9EA0tBkw95KhspcaxWQtb4yDnEhYiAK+dS9miXG+G3o2ZOxzkTCYBoO+lb5cccaASd61viale38nmgAOge6HY/H6+vrUisx5t1UlwLAeq9kd4BMGlUm5pTTNHMKtF0GIGBiF+IslDhKUhRmV8tNfvjk3fff/9//u//eN7/17dvb26Xqsiz3Hj6Z5/nJxz72qS/+5OM3nhDRj3zqk2+88cbFxUVK6f79+/M8P336dE7545/7HJihFQPPQ528OxCkKQHwE/eglVK8lsDWrut6c3NzfX19c30dFYY3NzcBE33+/Pm7z1987TvvLsvy/N3v7C8fvfvBiz/3P/2fv/XWW/+Pv/H/+uQnP/nw4cNvfetbf+Uv/cfzPD9+/Pizn/3sZ7/4Ez/98CHu3cNyhbKi1uv3333//feffvDeN77xjWfPnn39V7/2ne985/1337u9vb1dF2ZOnBPLs2dPAczzPE3ZCKUuy8q73S5QwZGVXbVoR+3uJk6+fvvXvvrGm2/Wp1USPXr8+O/8zb/+D/7u31HVeX/x3e9+N+f5+vr2xbvfISKeppvr63VdpsTJ/eZ4sxxvzAxqblarqUcHBYlFAlqJIYnZqXfl8iCia9Pc/UcYuYFPxihvdo5tNkxf8OzmlcDmWvVkNwMAas5T+KBhgLaFyg41tLYcp9bMfXedGOQH+Kpt+O6fqHfi2SRbE3YMj1oaJ+iOSKK7IDfEeSSI3JvVab0a2JpDCIWTeTHdpcTkINnKArgFkA9mruEwotZazFmEwSTs3poPxTFNE8zhGhFhIp7yNOcU9+1mVd3MkxGlxGk65Pn29nZZllorlxIeS0pp1RqiQFWraVmVc0op5XkKhzwcQtswZAyhMUQTM5ORoDX5aCnf1mX0JIu4cTSeJH6cNhyVWq3WE81GXw+jL71tvcEhxOkuyC3sTiMCN7Rtc8DihGYqIkRSq6saB/mwu3MD2SGyXYEgRYTgaWShGQQyEJE51KKsH0BKyV0TBaMpTVl2OU1zPhwObzx+vNvtIiWYUuI0MTODXfxEobE5mLlLKQqPpZSllBJ4m622i3kUiHaDus12l+2xyDMPSHMDVLMDXiVYu4SYXBhkXkuJzeVqSq4WQYnWf8Xda63uvbjUTNWA1gdvPLjmv5G5jYQfjfGYu6vcSRu6ay2q1dyaH4ieqnUzIgk7ZjtX1F199HSxdUT0yBNuFejIGw+V33+7UfA8dK5AjeBKZ/qRiMyq96fT/n0FGeEbmWZmLf9yArs66JRxDXnisBCMxNywbQQWD/ID2pgsLQ/gfua2Oe44QSODDXc4gkvG3Uk1HEK9I+VEiTrFaT+lN5fbyS0cUcCpdYW56xJQy9w6cMrPj6ml4P/tDqEjnLpgvN76hBaOkGMlkj6TDMT32MEAN2Bcp8NxAD6NYZzCPA0kaQOEfLKXWyDzjjXcUhD4IRy/OYfQ7vwdb24QeqfPHQ2r1+MVBhjBib3F3mARpzidr8HHmAhbmC45n/mE/er0Gr/6zjv/KH3Cs+O1DiL53W/5eNPbr8LWjqgsfILtqu8SXSFIryAMU1fz1WxhO4APMoto0oWKPgcvc/JxCrjBVuDKMQFMbYJjd1ZCdVtdteoaKsmhZuretB4xVN3MWRiSRcS0aKlLKQkkxGB44oheG1xESlWNujCWYL0ywjRNkAzlcCVewQ3gNAVt9l7vYP+gpd9Crr1cMGpZldjcK8Hdq0NBTqZh/8DV3eFK5gTXtTg5M8O0Efv7SVIQ0SlFGeuWeRA4N93XQhvYvonNn9v3xzuqp6QfunfjPaF1ig73HN3o2LQ9s3cxfrqojYDjaZCnIOMryl16p2h0XTOsU984hH1Lx7SIu55YRodK40YVOLQogrFr3AZ6wb25JwoPuM1yyplF1Dgs4HVdp5TvnB/dUP6otUA9DxD3YMYCLMsSGEsimufZO+y1hbFb621B+LSJUuJ5mhyY56mZuEMSGsUEmyurq6kCc0rf/PrX3nnnvSdvvfuxNz/+e/7Z3/+FL/74pz/3+U988pNvvPFGurgAAe4wBVGppSlCZhAfHjwxM8gMMyQZt9FfAJSQdy1Gm3fkDiI2y7NRaxnPkE1oMCY7Sg1zPp2q1r/2f/4//vk//+drrX/8z/xXHzx4oKovX758+vTpkx/51N/4W3/77bfftla0mn7kR35ERD735hv37l185jOfeeNjjx8+/thnP/+F3/3z/3ncv4QDH3548/77b7/99ld+5atf//rXv/3tb3/rW996+uLF8Xi8urqSsjx48ICZb26ub26up2nK82RuilZGGMDXzOu3v/mNT37yk2Rlubl68uTJsw/eZeZnH7wL58sH92+eP93tdjPnev1cVW8dKSWBH18uN9cvy3p0d3ZvJpwGPV1r9uCdjr8toWHxO6JSS0Daen7QuPEtCsuCHLat/sGC3dahmSUjqHkzjZ16xz6r6tlbdAqGXpdlFi9OAX4z4wpL2FSqODyaKxO5grhjHrSbTDKM1zuLPyzO6I4aqbDoVR81Wt4cm1ZSVbTu8h5AtVjFgUEldbAk9LwQBayurBWgxL540boUVXMnkJO0UtvADYQFKaPaNgQAwynJxDzN02E/mxbUiDCRMztzOAjUqQurOtcansx+n9erl0StG426BWUIyPRoASVIKQ2f0DdlxkMgtB1Xm/dLVM3AoGY3W8OPSrSlJButGofNOs4jiYYoG+/H/GvFCB+M59udCmzl2Fh1vnFchwQjR5DELMuyruvIImrHCUfcL7Gco/MsWgkSeyRoAHNXcU7ElEiEoBHyaFG2nPOUMjNyzhcX+yQSfccTo4q4ExTe9wifkF9qZuRIrsw5pyTSXSwzM9vv9znfEJGreVXOU855ZiZbGoUpgYjmlKtbSmm9Pbr3MbvHGiZmhkmixCCvAk8pCcNqTUIpEvJOGiTDkDEAVQUYp84cSCk5tYWKjaJ18+3DGmsmNnHLdwkkxSaIti4EcH/R9F1Kso0djOcldCrv3Gr64WiNd06PsI/w/LW5s5/HOBr8yZvnsbGuohSQBhHoaTts/UactEzOI+EJ7WlS8rOMa7sooRfYBfsrUSzJzS1sTZ+g0ugPImJIJ8pynrZxt5B4AMEaO7cPoe0bsBNtwyt3HN0YFVqkz89ieXF/8Xxt3NMwL6mTdrXawdiwzQeuAZxv3NknaGjwo0TRICHKTpwbqUxj8gn3rycT4kbPsI4DA9ClfS8I7LO6tXYGZG475cNaeH1q4jXvtoH9gEzNK+c5jYTPzmybQQDjmY5azVPDhQbhIzdHhVWCuyAay7Zwg7Oiw3RJQMYeXZqsM/oA0PMLfn+I6D8en/DVMZ2Cyj/gqDy6MjqABJ/cLyDfBANIhANTJTBQDC/ZMuQe0j3wnJSrouAFYZlsDw7fT4EVDqLF3FNA99suMFiFrt7ayUoEGoM1gCkRTxwBXGW4gluYOLjlIUzEbNAUW5LJWSZJ5GIRG+57n0m4maPucZFXfbs+pLv587tzePbOSd0PJIWCgFhjbkQOcoISDGQEJbgHK4xraJzwB
s/AoJt0FEefhp4nGFV5bjZqm0P5bR2xASPFRsoBiIKpH3jwpmOE9W7A2KTBhiGxvYr3thBncjXsjZ7PGB+1qPbGQSXhYbpsNNSJ2hoIqIgRpVap7hEjk4SAlsV/cA/OPWuRb+YWORdmEZ5S8g3TdyTF2yA2RpWDw4Osprm3OrwTTx2jR7+f/gTaSmVm69AaZlrXMs+zqppp1Iccj0fAmNx7f3B3A5iJkTCnXHIBOCVpapQcEWK3GjqDg9rbDebHsn7xi1/8Y3/qz/y+f+bnP/87fhZpp6vJxGB40eV4TCnJlCAJhFawCixLmSaW/SwOAKVa9OnqN9WXO5Oqeyv66gqMhYKTCTDDWQMohwi7MOXcnvoaDCLyL/zhf/Hf+/f/w6WWP/pf/1dDZ2JdIYJaj8fbWuv1i+dvv/12pBa/853vfOWrX3n//ffXdYWbCN27uHj06MFnPv3JT3/yU4fD4cd+9Atf/P3/uS/+gX8Bzri6ev9Xf/Wrv/bVb37zm3/1r/6VX/iFX7i6utrv98tyi6YZzU5NQVBN11oI6+OHD589/WBKIvD3vvfdlHna7Wop8zzfvnzGVo9XL9zp8vISwPFmlUREZLUw+TRNuq5La6ISxoOM1azdqjiV3mIsibaxBbwlNgjeS+4lgg7qpkOQfN4xyAKQwIkyhC1bv6iZK+zE8AvArKryQMShW11t4zt806feNy0NsQE6BiANhG0LLL9jEm28lOZymLXiMSahU/iZmSlJJ3YyYWZOrX0iwd1YvZgy0rjQenvtTmstRZ2EJWURgSP61YSgRIMCcq315rgKuWwubGa1ViExNgdBOCSN1nq7LguJGkACt2JKqtzrZmutUXEkvRmgu9eyuGtwM0ZmMlKg/an5Zko4tMKQ466I/iIneO0G5RR9zxiehZHEzFyD1QeJJRj+ecPpN+TYyAhtfQxuyGQg6Ob6weSR+PJuqhI7EVgwzSnn7NCqq7sCZB65LmGHRSacOaXUM41wZxFJLC7OzHB0V9aZwaDM4qwslKVlnFp+dV2vrl4EzU9wizFzFimqrlZrxc7H4u/zN0ztqJqT4ZnHCu/U1WRmoibM0zR5aZ0Pm30gAkX4aVEUEUpokvbbLDzNIkRaFmHs57zbkaSmnLj1qTOygM9ulVZTSM2Lif3gZ3m5pvPOlUiMXIsGkxPxaRfXWiIVvN1344rbM49dz962sLlBLZy31KzeDTARgDsREvEgEAYaXUE0mTercI/+iEQIVcubiglyCtzgcJZGN8IWTATgwxnrmjQ+SgkWrZNOtVluxiJnUfwBq1APmHcgHdwdXgFOgwd8GykGRRB+TNeYwGSk5zhnADBnokH40OaWTsOnbjmgDzX+OE1Fl9KN1PTVuNlpVlpEaPMcTrZUl8Z9JM3FbRQy7kotzajUiuVifAY4Bx08bYrBgI2nFwuJ+psjr2XUvJf2w4847vhCw/K784uTF/3KGX5jrmA/3dZG3+Rd475OlZm++U5bEi1ZFSk+LWrFa4FrOO0dSRiYh4a+dQ5tx2BjjCZ31n3CNgu0uVBDnr7qNLRZfa1b+P2dyd/kcXbS3lnm1SMsSPJGcBurXH1hBkGoYWITeE/pAnIDTfAZfiFQoxW0Gp6ttWQn4rdAD9O0zzRVvF3pZa4JwsTcYhAEhC9kZl4j6AyONC2JsZGSW1gYEnzFALlaJRgYZmaoiykDSJNomrIkcnaLBCIzhKJpexSKE6Houq6mCBrMuOtY+5utaZsJ+8Gu+9jj58n/FsmJ9wNcBxhDEf95dRRyA9ytOrSrAY8KyViK7J4lgQym5DA0MBQzw3vgY5Mos0Eq04ZB1IuBqp8Iq7Z7cCujxr8AosZinHmoWt0Qyw05OWw86pHQrV83RoItKmdYmB9RxBjm4nBEB1vB9ju0cdaaa9SPZLS9mTb6cTNhoYdSDIaG5CygCBpExmU7R8F/aK7jVrezjJPVTvM8W3fH7URbClPbMLqecrL7/Z6Znz9/dnt7W0pZ6+KuOef9vMs5pyRmI5YfLDJNN1Nfu9EBPIm4m5uSG6DELiAw3nvv6R//E3/6T/5r/xo8ozgyVqM9Q9cqU5onAaDmzFSrMfOqdcpJUgpSrfA585y2kcAoSg9KMxLqohRxKgDCrc0SM2RKp/1k8GhJZR28l4QhzPje+x8+vz5+4hOf8ONC+/3y8sV8/z4ASN7tdwAuHzx46wtfBNBAqktZnj374IMPvvnNr7//3jvPnn7wla985W/+v//e+jf+1tXV1b3DxWc/85kf//Ef/92/83e9+eabL5f6c3/sj/7cixcPHj4E0wcffFDralajcik8AbXqRuqGWlQ135vSNKU5f/jiuVl944031vUYTQsBPHv27P79+ynxshR3vbm5yXm+unoZDnbOib3RNpRSEckdFhFxsGtV1RxZWApjHUEhySJAkwrDAQhtYWbMgRNrOJ/YU3YqqLVNT0JXtZRSuJGZpTDHnYoIC0F5rMawd3eHmSlInzRQbae9PazWFltt9U0SRPIAgPaaiBwK3aBMR7uJZuDGtjez6Ck/3lE0FspA6DGzBRkGotSFyMjBqm5q7OxGnJhETMTVGGbkiYXIlBhArbWayjQHxM/MmFg4kOFwqDeuVpjZUkpZj8ut7PezqxEH1bgYcTEtRX1u0OJVa8O4Oplhv9/f3t6W4xE4RbAY5MwRNgurfSsNXyuIw+3rnxkhcc9/cYc0uZm71hbhyx1u4KpFFRFs6othWLdNIk85j3jeycAdwhBg6ikGIqbRXKi3n9+oisQyylOtU4qpU1sD5hBPQimxQGLawRYUR+4U1XOJRTo+tp2ZnME5C5MnwpRTzuLu63G5YfGqVmrlGnQvjcozWhhpL+jq9JUAIhtfGobTREhySlkiPpVSSmlyg5lBjcybQ0Hubq5eK6vqui7uZlqrFjcV4WnKKSW4T1M+zBOcjsrMvJvyfpfmlOuyqiptNBzYA3HXHU7S1n+Uos8KOv/tFiozkD9Dm7TlF/dILUEXYW4zI2SCwBnObpFROlkGQ2ENVeghCjbUaGM5jeVxtj5FuCNmfbQbFYEIN6bfUJ8RmR3hXQcaAXFUXgQK1925FfR1V9m7nUFA71ka14Vr79i60fdhDEXKjTvHDJG5Wm2hvSgDidOKtBBk+CjDHffoo9024OnG3Ru5CBEFY22bECIyd/MTiHT4tADQ7C3bGipD3/eQXfiuZyZPQ2/axk0OobEJwLUJHSlEJqh3SGeLFgEdeG99UIJW9BW/bJCqYeCEDDc/NgeJqLnezgC34kxnIhnYwY/glbF+uTvvjIm/8+XzTN4P82hxjdMAvOdPhmCkkTkM+VoRfB66wotbha9OIEqwiLsrevNliuSyd0nrzGSEqJrofmYvqD0NiQzO5LYxg7cDfq2zYT9cn/D1MNFNyeDmUA7IGYW9G18tWq+lVcRmQEAM2jFfoi5AQtnBDkQm9FLp2rEu643ZtOMnmA7IlzOxKwzPFjsyWIjYQOJoYRYrpTgU5ilYEpzY2aCJq7u7OYE4jHAIjMyMnYnZzKq6K1JKnOecZhYhQ1G4kbs4
4BV30VfnlyVz67ytEYTv8XmagwFUqeFEVwJeBTv3eoQIjAQiDSpR9OG4RmNsZtlTj+STt092qqu2CaBc8u3AhmFqHAdQ0VxytByNQNgGaGqrvHSm98CEBCY/D9bRvMYT7tXx+3bbvd3wH++vEw+2qGMcbXjyO84yXCWQCC7dGqiwfXlmWKIXslgCaRuZmqCrmbCcHJzYyr40MoCrjmIzU9+en2wJUWT7DvcAS0TM41pHMZ12216GbNY/XKxOfMHFFTKreGmXUwas3/bDVXVUmyI1ypSl7a3aln+KLzxeWV1R0P70SyHn8hEqlHW5SiDWYWdatSwMECvhgA/dhah6x92ufRp3r1bryYzZe1Lsq83HZhb4uGF4VnbpzqO2Lztb7exVIR+7NIvlheFY/XAs2PXWbmSBmlOrPxxgD1oJLH/mnYsVa0QFK1z77f1T9wAXU2SXgZ54F+icjqZlkVkTSSPTMzl+rf9LoVNuNJZrUayYsi/7nZ66nvfmJ362qvk+0xc8dvm9caQG4KFg74ljos7uUwaoeL10NWft2v6DxGL5DJzijOVEzOuO7L56vLI65usVNPm8dxHMfx66+/fvnyY5yFE3l8jF33dQWuxHdyxfjBLMj+ouJ3kLIMs0zIF2GggKmcTDEcLFEPDmSjERCYpZx8ZohKqIViqY4YcYZV49dt25hFdffKagFQEKlboIny4jz2VBROFXwlyM+UEH+9j1MRX8jjlOIrnWABj7lwiSvL9dWXqmsbdxSY3nfS9uqT1/G8EOo5C3eYQbKvSNsGPYY8a4sBDGTKKcpE/EwWPZLicuQVHCMiRH7iVb8513YxNV9WHsty9XKICDWCZVBjf2XJrseyRNpYd+1DifujFqhWxsP1luh/V+Mh78n/nW8M3hKRumL+8XwPw4wIXr0jz3khegWhfJ9ASOpXOszdjPwrP6/zTwtrbQ5wa1QM67pHAp8NG73k8qpiYlHhekcCw7wl0csN66uD9a1b1zcIuZfnLuDnDKQwGEYgUe1PYxnjTV19Ph7fvm7322DCiNDKn4wNJg0QsOCayjYAsMhvfvObfc5g+JL4Bc7yDz7t/+LXuUGBJCQiyzlNKVnXf8Pw4HoRLXTzvZcQiDCc1DEcByUwDhGE4MhsGwoQXSIqJJdL7OHyPoSMqbT2cMFk4hwVHkzw2zQC8wnC4uha4pJBLCensoqWOSXcRurYaUKe3vGg2Iv+ExddPeBIhUr7npep8JJPRIvW3ffU8zM9Z7RuZGYiJwrzGCM+EZHuNNAnvBULIopQ4dvbLd6977tVG+UXg5CWDLSxXXJkicjdVoNwvXxhbS8rosfu7lz4hwgUkDE8s5Iyi0gk254RD1/sgVfiWzS8loIiZ02RmVlCDjYcCGkVCDEz9CRcMzPPliMzEPANUSLWeicBTM5M2+BNREYwVvqX/+Kvvz0+9v0Bku02fuLfHDrd6eNh4e4zysRIZnYnCk0zlMjAfdWMQXmkscF8KR5K/SOWYoFmEz+TxNY7e5WsmnoHvISIND5DlR2H99/bjwBUY1D/nHZ+0Z5XAiPvEl64u5pRgpp01vUC4sI2QUTkBJFEvwj9I17SZ6DfGDGW3Z/uzktKc+iO7hCR3tbqVHnWOsJ8EG8y/Kwlc/fuCpJcQK4IHDjR+bQB+tou7UXolW8l/twyu3p3VpdousBWrtECOyFV2hyKTC2uDKZ8ftWp0PWqT5iEeTmAScF0GQAsm9evlNMHlsqAj6Zz7crqWTfrWImk17OxNDl3/8TkEGIRcT3h9VVfmwJRdjkninIyg4hsg1tnen9/7+CYL/qNLbHx9Zm9DkTkrob8Sth7zLzaX5EVLTLCIAwetW1b6CVJ/JYHM5gn88YiZly8SNoA9rT3DECAykQzkmm6cRrbEdhpjrdeSXJznqS7LJRWxSMzz8P2fSeITaNxoY3rOpx7bWbFUF8Nj9qFM50G7j7VCUQcszCDG021jYWIRThc1O7eZdmx7HPfIXJnDnDUyBFuL8YKsRMOJr16UpOWWIPy1idHoJLlVUY0va3OiF7AXmosgiziunHFX9sbYteywOUcnWv72dhbCI/iBK1b3N8yszNVoq784qc6E3fHGkFa4pme/SakzcWVA+ShWNwK7s5jlKWUIoBFwEzw0/yLWko3PZv6IkwHX+wTWqLc6xK1gb1635II3X0xv+sruYO8NFnRU+uqXQMBOKKPX8wiqMizUcSqH+Bkla+bSB1pVPU2lohOsqiJAAhAXrhjMLqstEzNoBh0XGJ1kX/SWHorPxOVme2PY4zBY4CE4xiqk7vpmVrc/ArlWOzVpoVL+yeRHelCvezr0eAq535ZH0rEg/aDkBGZYxOJwmKb0x0sxjA1ejy/GSkJCzGE3BW0GirfuQwYzAEkytsGd4Pfb3d1D6zvP7Vi/z1ehFZg4oPwl4dShDE+m4L/512MghVFBgOFElkU9WGcmaWimLn7F8WZ50zFOn0ouCgkAnf2gQilhv4G6HFEPs16PzHT7caqUIUzqkYsKF9YavXMEQUO3t1zLjoXRERWRRcFExClByjrw7vt0FXC9hrRtZwNSH3vRSKoZe3ScFISP+aTGep4HN+Y+Xl8E+ZpO9FGNxbife5g3O43skPnw4117jqfpiRDWd6e+0EMZgbDWWewuY2IRojD/Xm4QfjmxpsMtykiW+Y93c02VbVpAh0yDp12TNlug3lGFOiYR2T6Mj3n4YAxqeqGLXzOQswQEFSPYEzbYIKoAqbEg0Fz6tTHJj8SgRyDxcwOPZjAhE2yMtDmE+xCvgmePvbjOY/DReQmg1hV53N33iEm22BmOuh2u7nR87FH4zPmARczGrjdBPt+wOw2xj6ft5uMje5vbP7k4e7HbdtECAANG7cbQNjdxW9vm9xv+zyez6cP2bbxPHbnp1mcWVLXqaYOHuzTnQ4HHYGxBFcyF4dpgP3TYGc4bMKV/HZ/fz6+6TQRdmafh5uJ0Be5Pdge+8Ndx2AytUOJTApocRvjtklkviVEjQfoj8McpiQ3IZo6dWpi1iuIaM55u93IjYjUDp/Kt/H+5W5mj+qGxMzeQb0hzHzQMbErHSQC2JxqBgamGzOYBjGISP1wOsDK2NyguruH9lyeEnJE/xlzPabzYExkj3SoqrAJiTEDYUWTqm7b5ky7zl3n49jv9/uDDrnfeLAdAEzVDjsA7rzZ23abb2/uETSlHcN1ym6D9bbRYHvoTvYxBLwNuo2A2FQ9dN9ZcMw5NlafTqY+jWzcx677/X43Mhp0j74gpgN+6DTaiheBiKLG0uGbiLvCeQJupiDmQUIk7oAC05yyotiJyCILkdk10uzCg6xi0bB7SU7LYHjwG5azJbMCoDvbcTBYZItW7IfOUIOGkLsBtm2imowwFpnYx8ZmZn4Q2zF3GTJ1j2akx3FsW3SvEYA7mDZVmWex+juJ82AojuO579v9fh/jhvm4y12nqivTBg9MYNzGxjSiRMx8EojYt5s8nzszb9tNjxnB9NPAU3PXqKZnZnNX143MzMgHe/SbZzjppP1pLEI+3sYwmTTth/uNiP647
2RsU5/fPnC/j7c3chzPSSz32/vHtz/Kdn/uU2QDMYgVvol8+6YABotPMEhozI8n/5Q4gSIy7fh4PG73+7FDcIcxY4PB3URE5yTgbWziuLGQ2g/3t4+Pj33Xqbb9eDfD4+O4395Y7jLe3r789Gd/8dfgm/H45dvzxx9/ImNzNoKMAaLHVNV5k7Hd3oCo4OD0uFVkgwhDhLZ7VIoStuOJ48NwY+g4Dp+7VVItnIlATr7dhs4DxLfbF90Pm25uqiqybWMTEbU553TQGCzCB9yhTmbw57FPZXV/7Md2+3HXKdsgjsqq6djHHTzkmE+QzbnL4KlzbMPsGLIp1NzIw+SOXr2+P/V2uwEURY9jZE3EkFswwMZ9jY3QsCjKKildAUSgIWSkqjoP8YT/gWYWdBenhWLdNYdEJGenPh1D+rH14enQ6fAvqv+KYzaAUw3JAUgP62qw6b77am8Ih3jNX8tUSPuQEu0zzIgygYhQqaEiQRhhaAgxPLJzHV5oDMzC3J51d02jPwzIAtTGxQsWOUKR5Up0IrkD5Km0GawqR5h5zkllvU83irp8vuvj2esc41c3mBtB4eLm7tOUwpyTs79e+z/cndyjhDiyNnv9mdl0xjKijJ4wPn1Wy5zuX5IwxaGU8mJkOuAIpIxFTc+lIIKDeLG9IpkmUJjLuxHNe5ghP2zm7n4AIE7QDbOppm0KInZmMBGNOQTigWTnzsTTDcDHt0esWJBrxvy3MTbJVPlqORn1Cqq6mzIiFUIMOGyq6i5v7CxgwXzzAwjLjVlkVwbAsolsZrDHbma/0f+F7c72G9hv8PYTyzvDFDeDR+kLI1baIiYpvmEJPEREm0EwfBlvLu6m7pPIScIx/jnI/Q+ZQHa9iS4fv0a0QvKmyzWr7+tP7oCBAj6FAYSr4oYb0IbHd0Nkl+H9lxq1dzzN7zu9O8zIhuJmuD0ev8Hv/vLt4+MB9Z8+jHT47sfmd8KtJ7o6Njj/+ceaixo9A5andWuVqkEyUCEjkAPPMgXTu42MGYSv+SCwu3r0oiCYOYgg4q7HrNaCwq5hWU0Ag7iThhjTERKEjIwsGazcwoOL4OHubm5qKnMSB5lpAuo2q/DqTwOQ06Dh7NNnGK6UOe6V8U7EnL1ac/qEMUR1du/lObPNcgMHWEESolyEWbJYYL/BBGJ2UQKWwgv3TZiGjPbwAacLHMC+7+6YNJ/fPo6K9anqT++3jh92z6vShjviesYHIg7jDQBI3hLlInhaoJqCSSBRtcIVYNm2rU9PpNk8ns85p1j1HCNaG3dEfSNl1RaLyNTD3VWn6gEk2+rwixULZWYi70jrsWdAr2VqMDhmdgs/elers4gcdlC7WBYezYtUjlHd7/f39zd5yy3Yxun+//Lly8czWjtssg3m8TyO8Fi/vb3NaapqTsRMZIUnlgm3kWISAuZ0A3Mp9J6pcQcfc07+1IDu7GMW0Z44mgsEazgbwiAcYxxHQIfQpaim6K/CyNLbHREQgrQ94FUTBTpVq/Z8HOThGuEVmwRwpzF4jBGQvt6dplySt5abOBZgjHHWfJZfxJYSQWRY4KybQgApLQ2dI8xTQjeJlojcE6a8fauqGr3RaNti3EH8RO3amdEZxZdGKcR+TLXv+LZXaX95O5dDPRiHVyapRR7vMspc2GutUcDrXVQBt0YKdfdCWW+i9swSk0v6Fio4GQiTHjy7xhlPbj4VrDzGGTU/7lYB2DwdTWkXLdB9jKGaPUU7+IlrGn0ng6iq2ZLia1FXkO4J6+TtauPj7oFGE+kT5tqdNuecUpWovDTFziVa3hvjFJGuhqeA2ODQjtx8RuRqzvl4PGJl33/4kYhEeGxsTiIDpoDMuTc9LPue0UKR7NQkIm9vbz/++GMXyPVJEREROo4TDCNHHaXg0ct+9+dTVL/I2OIJf/zjH5mzJcPjsRP4N1/u27YZAh/eBc7c++hjY3eXQVMDVTGkoJlZY4oRM6JtNTenzYJJATk7rA5aEWQgLefIW+enM0yz3bZBkESEzm6Kt9vt+THVzvwUM5pz7vt+u3EHWnlJe6lgESNZYnhVz36DnE13L9klsbO91EEn+JTkUjSfX4yRBw1sLM0nUVbE0p2VVmbl7gE+1Dyhd1OWmrFeWBHRqnjka/wzb76OEmYBxN8r0/zEP2UhRkTrZCw9KjslBV95l6myGyI2GDEZVeczEQsv41kYXb+l39XrSYs11GpM28nx/evxqfEvK8aFB+hm0Y6tzWBmdnImRtVG+svAAODkmecUaEVPvbCyppCXz8/h4Uw/69UGgBXdhXnuO/uF3ogo/Xc1cQK88kqyqVJqpTmEpW6/kqFKl9OGc6wjGTka19eBKLQAalAu7dIhzkIkFKOmhLITEXGd/XwssfF93+k8+5nnMkQc05VtfvAuIMFdIENw5qSGT8LdyyT4nomHtrGiiqtDnZ9TmP7LLBy/vI3Xzz/d44uBF7OWnoLnAL879Jc3Gv1TR+28tC+vbALEDo5/dim1nsvpXuzCPWIKWNOdFk6QvdYIoWaA6rC7O6JhL12ATvLABpELSUKjxYt95SQ5pDrgq4Kw/ql1BSyenZUJ9FkLlaAZy3rbygN7tFSlEM1DjiMAzDs8mK8QiUbBlp+k1U0DZx7XpGo1vrDaTL8ZY5vzOI5DVdw3FCPwJWej+5AGwGBMqft3cSXphfSy0yPo/RBmNl+yYko73LaNmaepmQ2uX+d8mDFvlvktWBeiJKi2qGY/55Uap6sD8fmcM3IgLTE8oBp1196Noqk0/pjCseucMyA3o8+jiNAkM0MiAVhv7cvGi8i4397e3vSpc+4AbttbKFLbtr29vR3HMYGuqNnnJJLbwDyqjTU4wVZKXzl3xAWksRDhnW2GZIrp8ziO29iIqK2sivjg+XwuBJ2oP7R0ge9ZxDKCbX11aNJaQacYatR6hV0nmUfDYI5GZPHeOWd2z1j2iIjUU9Vb0MAYAKnKkDFGBN9POoSYWRVMWgftwwBotKHQesP8YI7YIat6NGYRkRAMdubHBsKN9Ynq1ciFXQCKzAI6VQHILTLPmYjCvUyneXlW14iIMfsifYlIiJ2sD7yZQc3lO0IiXguE1xfFIC6aB32PbQGY3ZCqbnNOE5CZYafRm+arxHhQKQ1Va+rkDk08XviirZ6ccUlp6LNwHZ6vI/TFc180eTJEv3qU+p6VNfV38+aahSZgb+4dmv/YheH2z6rKLEE46MLxcyWbm+erOdoo1mNyVA5U7keztbDl3B10ejHc56FzzjlIK3Z00RqDJ7WoW/gk2k7DouwG6FfPt/Of3T3aGwIIa2qM25wT4F9++eWXX34JoTBkc/ePj4/bl+olsxgYzQFeWMSc0xTO8KXrXRuEJWKICi0JlaRqWlk9HMcn8g+cOhzdmWzqSqak1XijUiKvXhV3jyR6rzaPTRJxWKgcIy917C2qyoDMHPJTYC8l5bZq8Au5oqrliS43r2dkZQWrpVqnrxT6sidf+M/LOeq/0qLZtAy6ENKiu8CdeJynJraSlhsAN6OCCW0tZB2Mn8pHYnKGsznm6KYyBpizwNQM
YHmx95p7A2aXZXwZ/MoQygLpFb56rBaSaBsSxaYobNS27sxlAd1JswTJyV+Wu16R7Z1ejkBw/ZfRAoAw2etG9HhO07QfqIqlJHj1+Hz3sojVnyPpHHjxUp2pjMPe3IXYXmZJTbr5c2LdnxXvkdEzjz04ZOiNVAjJY4wAtVpJnUHzlYenj49KB3jhFcoTdJiS7pAIm94d8kNWXCUGZuQNEsBXjMrrRUA2kYs5+vXP/wVW1qdbv28KrpchOmyE1D5XoKxYFNQNgdyzbL+TIf8rx/nda93r+Nk9yCzbZqpOHczM/5mKzf+TrpC/7u4EMi9r23FdfII4lJjhFE350sfi7K6ppMXTKP1+fSjqGAJAQnraxUG2DmcZ1fJ2otYN+oFYDMLmTs2usXj3VhnR1TTNwU5Ou3ha+70nZ77WpZeLLx8dEnt4Iq4m42vXKW+bmRH7bbtV3mqmiavqti2VUZZSapk+k01bOjJjUSDWuXX2V3M6ME3T/dgBjH2LrYkBEJFJZm6KyJQxiLfbLcNQkW1Y/7GImU1Tdwe7bANMsg13H5uMIQ7rY8jp2eW1PDsWK7ib8EaQUKoqqsBmhxmIhNksI0sXa/5Mpqv9tqC+pYhC9QBAMNVD5FaGCr29vT32p4AKFYBFsG23X78+VPXQSZAAiWX2gJ77fFSo9eAoegRBDQ6dFwHQ5GJmRKnrFC5OeNpO9aWoXgO5A3LBQcnn+MUAtgXKIuhLRCJnxo2InQNEZAVZqcjM5h7l44v+HVAgmazi0FU1ZyaQub0e1Iivrnp83V+zY2/3v7s7OfDSMzqNZADw5ttoV3SHMFqA1eJbq6FhKQIYY/jUUhryZjVtoRsVce1P9UWrQKmnYbCtR695CjFxtYwjIv8TmkPcr/CBU+crzkcE0oKFN/do3iLOWYVP3XjaAkcOVOYW6gSVV1hESCJvl00N7oZpPlUhkugwZmamUdYl3+vVQYVTtbLCxfnXOgT1PoZLZageeyYCnEeYsky02bqIHHtmFmzb5pXEf0QBMJ/0UBKCIh2aPrlyaTH4rRIi3JLUI2/27e3m7rf7eH9///0f/3Ach6pS+o/KTShdNpfnLjnhFaLWzPZ9fzwedYpJNfoZnBg5qofq0Z6L+PB2u0temxkeH8/H4/Ht27evv/zqjv/wv//tt2+P+/0HDuAlOTvbRgfO3oh2yqiqs0/TKOZ0uFSywnkYw4lARc/ZZqrEZLZCrbBDKEQ1zZwycprHcTgZ81QdOl3Vsz0jbWveZh+ZlY/5YhBKQZ60ISEBPVqOrXCVrmLbygeJ1ZG8XOvBfClnPc/v4jZo4bgKyvUhzQpeuEFPc/03Z1r1/+sY1rGtT3B3uJ6HzlOpssXb5RVfahKiJcIW4mrNjclTSVQ8geG5xWFSOFHcFWvptdmgajT7J8yensK68g3SU/lmp8M0FJj+Sm4rFUZOVPGFGUZgFtQW8+LxpHIx9pbkv0tuSy+vmYWZ8dKltr4YKW7LLnQGx1XRNLNlRS++jDFGJAVh+VOu/MVr732C4v96EgGusJKZ+1kTbzqJ/AJNxFTTXDaDz1PTIjUcTCjZne3kFguQrgZ2ZHjFMazUjMhtM7NSdgcAV5pkzzlJCALDMGzvhBtoA0ABnBkTon/IfCKiKjb703bjP3h9vvtPW4AvV8SwOHIpir9XMgWYlghusECEV+K/WRVf76eZSXZT85l5NAQm2KdJ/DO4KAK7gbgPlE2dTMDPlhAgSO8PgRHh8qzVl1Da8lbn5EqVrFSZI3nU0nRMx8rVaGwfUSvMlO1PmjksPO21Pft6EvvDNqZWpwx9Ej1xf2sFMdHTZQ/EyQZwHEdxyPCr5rfGRbrUgBikbsdxbELbly26LKh2Ym54cjOvMlL4NfaAmiWF05kaPiE0hsg32Pe9rM4tdbJynPMgdzd1ANOcNbP4wqc+QvcygCAkm2wQBjxit85kBGeBwGEOchPDjHoAzzOZUIvuulrz0VwOgJkQcVg7Y5hpZTy5w4ichcgZx3HAMk8eELjGuc1dMV+lVO/HumcxX5BtN4HFwtq2CTOO4znG4HnkF50lWvkwyzZYJ820E6Y5kRGd+Xi6SA6HgtgdUclgwlxDOo6DYJAz+S2+PrZxHIf5hCNSbTv5rQV8lrFC55wEZ4pk4PTuj20jz8hwu9IjP6pToVbCjeN0v9/nfoTmrdmOzDjKNmAIbzL2yDVNL8PEjhlNVEQkXhFg5JYFLdaSLJKczcbBGipjrNhIs01jFiICZPuKbdxO90xhn+DTta5e7jLO1OUWge6OBX1bQLtHq+JFiXRUIguIXz097Cc5tRJm01Jvpkz1iYXdti1x8xCt7aBu7Au7IoKFqHE4pju5kbm06pMjSrHU/SeiLz049q3ZrmaGgFq8zszC27uuVQ9+uVDK56vF2z+8euXbZ79EnD7vTi91PEFEnBgOrjBL+6EtqyZpHQCq+sXLWfbizMsDqEQOQupJ5aAsAwZOnu2h3d3ht9vYd2u6mnM+n8/H89vz+ZxzD5WJmW+3G5syw+1gZrCvNmccgcNCkkXAWeacHx8f6qZuBGI3OJidnQxOwup26DToNjYZQ80cuG83wN3o4+PDbB7H8+Pjw92fjwcRzcPu97uZffv27X5/v91ux/xWW5CSMlmZmZfdG/tqmbRD6lCzqTzd1ewwvymv0C5cWSGJQhXO3aL/bAIb+S2ddxDdDKGeNeRp+K+5/YivWAhgFpExBtFB5TjolJYmsJDny8VeYY31jPdXfOlHst6wXk1XSRILFAeV0bUK+AytfIocfnYVYzHteMnqX//lylHvHDCu1lB94tbheaVPn4fOvKGSTnO67sb3LvJPajFFc2dQUIgqKPsTrme/LZgWmtIiow3U6zr36vWhrpsv7k7OjP3zirkIsS9hK1pM5cvsesqhl9un+W2cYLifuBDS23HuF5XhR60V1H8EeGPUiVDUqZfDYqWl7658XyIvmSQ1r+pIg9L3egEvAz69SEGNlh57PssinBwYRAQmTjPWhdNX1XsXv66KLBaC32QASBcvdQHVJYrui0FLiDpDBcyPSO9V2ibbhLxhvIPuseHlgOqFsjIVXorunBxnfuk/6fWff6KH5Zq51UiTkIlgWHh+mokdKvxvdSUV6FnthnRobmOM/5Zv/q+8KEw+EnclSERdAQFmqCUAVe/uSyC6CCPTstTtzJMignXydOXNNoh2XV4GISijy+4nTt45wnLlnt9aLvtkY9OSCbLevwqpVRsBEPpwnyzv+JOFydYM7TQ+g9MQSXAGbr+nFfchIlMcxzHGiNZzqiqUCSRm5mpcLzOz9sDFgxgEJuKzmCcy417mT9WKbZ1n3BxJH3Rjd0Qb97ht7egVMRM7ZkA7COg4DneHs3NMlUOj2vfJjDGGg8KQtOoTjYUPMruqveQRueUJZBJ3i8zG2uy8ns9n2Q8wswBW9tIP/JpYyJVGzxWVbrNnHk9mDl9oGM8isu97FKGV24JFxqH+3I92LbvTdFO1OS0TsxBOP0dlHhIkA9zpcWcmdNpbExYz0xgUOYpLXWVsn5mBAjDX3Re
RBlU7NtpeWPxnol9/bQrWQgKEn41Mkl7JqFrZej45uDbR4iAPvyn4XF4z63Yb63janc/MMkiV3I1oENGQSGM2lmxHJjKZ+dAJKua7RA4BDhWzrhKl5QHyq7o2mI+AMEwyiKF2InEI2TNxlxyuFu7VPl9mNhYUu6tYzWXJlMKSKxSx3YUNBV2v8OXZFB7pocxV7TAFME61Po+0A+rIMCky/z5+dHeALFOkWKMXrDPTiCiumbVeEvMdY9yEich8VhnVpTMbnSkTl7jHOqk2cXvxaYlmhJsg1yrCx6jMcDq77vZSdzcINwOd7UPX27zjHj3U76lneb4+CdGkSdf8z/Q4DhEaY4goUbjsp5mr6jbK4w6Pjko9VAAQHjIqz18i4WpZmVczu7moDBIjECn8OHaf+ngCwJz7cTznnF++fNkfB5F8+fKj8AgfihlkwN27Z0nXlge9YTW/U46aw6dDPXE7wxnZYuJFRSsU0KCuPFOqGqngtbPKnF6q8gj4ycog4Tay5eow/gohiwozmlkUeDYhISH1qVxLl8iGVfrly2r3besnTQko5Ti+lUy47nmxuLrBMZYbRMSW160awPquF+V+/bypos9IP2elk2by8aGArLnKwuIoDa16YzPweDv8fMhCh81e0grq20obW4//i8L0cvZXZtIEWVO4pLdZ1D60UGiLlwiRCPE6wpOHU1mD59OCfb6c+tPVRj3N1ttyNewcf6/t8lVkMm0vCBGXM70ETd7fZPbdGkJ8umrigFejFo+f3Rciz2dWSjZvmyQGxGHVFLHXnJii6JSWS0OjSi0lNzfOkVAW7vRphatHzNnI5VTPOHIkquqBCJEMvyc2hDgR+4PcQE5mrpPGD4BhOOgGiEMMRGfUCG0TMnhxWbATvGJK/6UmD6F9A2XE/mfu77eWk8ANlNGUIJNw5DAJIUXtImUI10oQT+HzXxnhXC+HCg04zJXcVGeDGMsYcnvDd2t9/8++yiaMOCGZM0eftbC0qfI4nAELH3hGkB2lAMhl37wa4DGbzpWnUZ/tQuXFCx8OXd5Pr1ZcakreIg/9rZXNxpH3a9r5Cz9/Pp8vpuD6zHNNiFp3QqWqvlA3lfcTYGdnGwgHT2j/t7EBiAa+8aDbGCISlVeR6+XuMA81BtW6zcxUK7fQIgabHSmYZeohmh2x4rFjjNtt7Pt09zTnwC8zx8LlYzw+lT2b53Sup7tPU2ZmYRCFazxoZJqOUiXVjAJ0hbn60gfoVk5f1aMYmsBumQ7VvMw9mpilj7Y3YIwxxs3MjuMwi8xbklIAY7Glum71xhs8cj57lY7jcLVosNb+48KLJ1MHiGTz+dz3OQ3TMCOIPaGqc9pcUkYJIBoBNQFgAkDUvQxmptKq5XYTzr7MXq4Oh6tlLnLjsnjiXQXNpa+992gl1lQUVKdGI/szMbhRQJhHK5FrWuBps0lWTsbDaRBIVEMdZBEyP8VMKtxE/qljb4cHvRLqooa4D0Nm52bTwosDRoQOTb0wXtJ1EVIx1TYzSrHoQMGpacXc537JCPdIR115+nLhmrqZM706pM/nN5gEnXKCTlXvZBRRKOjuYPQxO9GPCMR0AufVN8MxElHBSPKCwzwxypMPllpUY2akplHqLBNZmgf5uuv6AFjhb7CEO16m7Fc7vz6/cNV1MKi8g88cs0dSWlIOmHDJzr0w67raAdGPok9PNrPKF4hfczWmHYHGu/CQwQXrz4z0i2FO033f77d3X6zcsgmZiITHtm3mPqcFI+2C8l6rl1nE8I7jeDwe8ZV97vu+m0Xb604XlK9fPz4+ngz64Qdsw9/evhDR47HfN7ZqOkrlbWmCTLILf2x8yOLT1KDq5jn2yBJ82REBnAgIVhBGozO7pVys8S+qLS2TKtNvLYkCLS0Q3T0qqDt9PVCscrnOB58E0HvU28qVBx78mavotP1N/a6VR9FiX63/vtAqzkNE60HoyyqslJGlkjKg7Dwe9MzrlJFxHjTuaE2kh72+kfCaXp6fm0F4HTYtjuoe+eXsLJH8lQqprceAc1uTQukEqok757X9zOflWtWjoJMXbnBSQx2N1chMly4BlYJxToQaHOG1n3Uub6A0r+9aqLK38vJ2+FqqRkRWNmHuYPGWdU063Lcyz3Veny8zu25j0jkTeyW8UXUzXvdupVvPhiNsZpBob0XAyJPWHpirpe0FNMiFnXu73TrqkBhovQoezlEHjJMDAE35pg4u3hhFnkzODHFTd4I9AdAw1YNNiRRwyHQaoO1qHbVCzIak8mXr5HNU+//YZYnVDYsn/8NXD8hJCQb4mdTqmULsFHgokcPcmIV0ncU/8fUiwsofnSf0JoJ/lhFCj4UCOQjgGLDC2RkUrKPCwnHUXCmTl8WtIH27aTCwnC9igIS5+WRUUwMUgsVJa+/BRHbysejqTEvhkrubGz6d2VXt9PLUNOvAwplfREP7OrGIiRduc1WKOo32lVvWJwRgiAz3R9h7RORG5BhjPJ8U2KHHcbgmrPBxHDIyJ4eJRTaRNPNUZxTbJUCf2Zq94+5q0z3z+sI2KFPq9EeublSk/i1jDAHt2C8qO7GCYtjMo8iXCoAh+D4AnXOnnNMQEZ9nYQ9VemFDR0b+VYHfELORaXuzpPKJ55zRKiRY3pwzNI/YCyEytOZ60RLMLJBaVDWSu/7iz/7sl19+eT6fPISIoj8ykYAJ6tNND5szvJtSXvZweJs7DOQErp4QegG3MKRqnhpkZA62lzp03zmnu0ZFH4N+eP9xbDy2txAhIsQCZn4+H6EDbVtCsMQ67/vu43TlUkSAi85iZWVIBF767PWQTk8hgOGR/xm/Bu7FbXsjKjwWDzuTuoKWIzJmblelfz0L7uZukY4SfZzLQmAAqmkwxI5MR/cuyzNWSVYUvk86Y+BhTaR5IGehLC4c/DRagLNwPQcfE3D3qZEaxGOj68V+cTp6mYLx3NbwTjSMAr0ALIKEQROKdCz3k0OpdKTyXjFbuHuY9AAw0sAL3RMONzfz0YBjXTkZaYB69n0GJ/d0BjsRhMjIPNILiSgChzGYirad+Fq4zjqubdty4ETuHoD8vNRSv1wv6mOE3bosO1/N0l6egFAnIhnZ0SSy3KMYBp/4bHxLA9x20RtVNZvPMdz96NBQ6cbNwePmOWcW+IlQlEI5t9VkCUgOgoDjYG5jDCaZOr/NB2/jz+aZtfGi4Z1kAyRIoHO4wIxo3/eYk6oKBeKXi4wff7jfbjdyejweY9ze3r6MYXGC+uRaFiOdOSrB22NG7s4+PPIwQkw6R0nSqs4GlRO4HDYBgQPNWPxKCCB2cooMAs9IQu6sVbFuENLKXuKNZtZZaczpV+7dzLNc9QF9BWU2HhVKGNM1QMdLXPRFKvOCPhrjCYcXgCylvq5G31/iKdc2vupLHmmPvynqtMHqafGEkGsgMtXIA1rUgiRIIoInbHGkD6ykLiAsCRr56nW+dejy/iinWEJ/ScrB7VXTJTqkPYzozatXt9B/OXE967ITknHIkNq8M5jp7tvtlpxqQWZvXp0LyGfqV9APCaOtNeSol2HCieRP2I
R0NT7X6RBI5+y1yuFG4nq3oi31FEQs0ja/l2w6N9pPcfBCS5+vkDKUog3uTuWrWknXSoKHJ9FOTBpmTlDAtlFrL0L2nR1TQ8Fj5vv93kudx9CMzElkK1sxFkdYfHExWMCALeR9e/sSBAI1NQWUaAcBmE4OdmIHvYNvRDeBGLaa+klInH6ly8pEod5/lc3TNuGfvPrdhKOCUWUQtngvWxlOREKgpeaN3VFB739am/A0Ys+Hmo6NxSJdCIBlrYnqP6M2hACQvmpyy3ifR9kaGxlnveUaO7Wqdh0RKzF41PdRSBVLP7q7w1kBGYNISROlL0+Bl2VIdLrkqA5jDMzdC6msTjrZNZmeImkJ51fa89/8DeWtDq4VcSMqMAWqwEZE8vo5WI5Mf5KjgoKinBLApbYfwBDit9ub17WNET24B7FDQz26378AFq/cttF9MGAeIUEzu9/fATuOA4b7/R6OeSrkj/AY9RyI6PF4isjb25cwq1ILh8x5RK85wI9Dx7htzM/n052ijEQPhRFD5jR3UvXIbIwqyfv97u7P57OYJols7iq8EeHj40N0uvv7+/txHPt+Iu8hsokqU5Eo21eGhaaqj8fjN7/5DYDn8/mXf/mXv/zyy/6cc85ojzGPU8ucc7qCbgTg8Xh8/fph5p0CZJbhQQBjjF0fsaOq+tifd4LI5qamcJaEI2Y6HvvXj2/Px6EUndN8P6I/xEYQqxR/Tj4eaZMEIJJ4g1bmnIMRqrw7BQQICCJsZGYmG48xjvlU1YgdRqJNtHpPihkDQL6Oedu2Iamj9HaTcKxMqDXHcXhlOXemaFBRBO4AfHx8MMgKo/Img5nm1I+Pr9sWVQGpmZmpMP/8yy/RpE5GdSkIgF3OtkvMkeuSmtPtthFxaKtjY3j0I2U3en9/V1WAh2wDZGbyFLCY2bZtDoRU46jsipOeKlSGSp6PfdxSyLa251Ww1yrj8/m8Dfny5cu3xx88E+HUrk56W46uqkItHni73aLe7P3tLcRtqHSVaYkQFll8UsglRKRIT1WQ9fSTJUlrcEyRkejWqMJR2mGqduZFR0a0MDHRGO1MK6ZDYR2ma4ngFrGiQCt1MxOObmzW6HPFBxtWyuxMqwvsX1HVfZ/3+73ucVWNTRkiRCeUuYgcZfD3J7fbzed8fOzq0UhvVLWw+XQiwsKUT68bSEZq/2EPgDj2d2XoQfOd2mS1RD5T94qG6WpZUktI08LMPj4+iOg2tm3bvn08ghM6ENu3z8MMx5EyYE59PPbbbRDJfjxut1vkiM5pZiY8nsf83/7Df/ztb387xojEksgPnbM2johpfHn/8ddff/3ll19yvrH/zETZS5MdAagBcjNnptvbe0yTmSMdPVS9PgjBwcpiYTOLrn0i8jz2+22AaT+eH8/jsT9B/P5lc6b0u+37vj+OqSIWgWIRAY8QfnOau6iqbFs64Ko8GwALSOQ+xl1IdVoCREeLv4NOW2iLTWNmLcsqltSzkYm6hYXi1bklUyca4ijYV2BxrxGPzlwFLsgxtNQZhnksC/qitytnnhWDtDSs74dIgY5iMd76k9q7q1ZRNiQALwM+NqW/EqskBVtFYxAzzOZDR6GAmpnR+V5VJTOS7EDYNpItJbXxuXbLgTgdhD5xzNlwIn4e2+aUvr+e8qqqRzi6f+WEtubj+TxfmhmhkDGiBINFgDPPk8oW9SVp5Xxd2x5l/wBoh5dFEZFVR6Wrz97MphumDdliZVFu/ugf2bty4TDLpFpviwrtUb7P+Lff1Q9pUjmn9jkmQJKNPT6j0YQSGZSpJ4Z2s9YSCWgxHS6kbdusWg4+j0fDb5TDJcfTrvyPj4/+0+PxoMU7k4U/V8dfPEHVoGZ0OjvMFWbEQlGdOImjKWO0hAlqsSlsALvZfB7H/gtvb3J/F7kDPy30xIBFHHYlqr6+99n/oau0re+YapxNDkLKO/DNVUkAm2Byn/v+uN+/qO7JdsAUpSXOcBD9ACD8rIk9k3lP/6gBXwZfPwhEDwzCcRw+lYRu2/j6Td192za4Q0Z9pUjuv33Q8IyU49VVBwAesbmwn6NszIFAQpgEq2Ye5m4AMUHNAneDeJBGHYxHz+RsjIvSoo3o0HW+KCPQuuSYTgWbhPU5u8IIUXm4wIYxJxBkqEOqCqRpV2z8zFCoI5CuzGA+YSnEG+NI2lKj6w1X0c5ukQj5JO9iJ2TXQM3YWKoG7u5GY+GqqWm5bzkUDCSKJEccj5lVPXjpKYfO8ELxX2vrFmYzElOb8bUWxXy2CJtHgMeob5eoOzu0yu1afWxfrIj4nLVPZzFDS81sHgkSkYg+vdzTH2SAwjM+Gf4AZtzuY993NRu4I3N2Z+iUpgiQa2YGzdhLO2YvDiPjjfue6mBU4qFAZY7jGPDpMGIHTYdMm/NpCgWpqhoUcKN9qhqcoAY7kxIDnJMCZdTO0yldRugW+Xh25oZUdA7pMDglO9GYuleWHRWEd2ppp0lQcaS1FjE23cxAxhIwBu3lFS4CiB9FhMgLJZU1Gn+b63FU9FhDXwTSsMkMe2TO9G3bqo62MUKkKw9VrTz6eSrCxD3ZSF66bRvRbMJkygRXm0c4OyOmtGz9VueFA6AiHngcR0CiVRLgiAEkI0iovdFBLCAzNk8us8r+0AUXbpvumDEi1tpqH06V47QkzTMRlhaxkWDxS8a5JswdsVXWQcUC2/Fv1gf5ZDTuPuvcARcO7USNXNPn3U8FKINF7icG7/r1lxPKUfRaakQhPb6u1cv0IwgfK3aI7fu+B4YQCzOHo09EHByO8G5yAIB5qy07UW3WQa5bFj/b8id3V7dw8ztfdjMbRRQTav4Zxq2IzGnP51O2+7jdhcc+jzknbeKGqGrODWcC+LnP5+OwH8A09rn/7d/+p3//7/+3b9++7R/7168fRGSGSAQdYzANQjAKfRUYBQHNS1MEOa3WytdwBk03M0aW7jLFYGK1IANkkOFqDhixEQNEspnD5uQIjcpwzxYRcxlJ1ixB25kei2nVPHe6RRUIEbkrkbCAWZ7qE5PtOI6pC6DWGiCyTKkgAFGkvdJPkMqQrVXrfLM7Eb9k4bZDsykhckBOwug2yh24gwMQhgTifyQU0AjndsfG3NfBnxbLdwmPqnLBF/u2H7L+wHJx6ffg23iLS1SRUo/da9GDjK9JmGRuOKNYevKBS5yqi2/Xg+ruYEHGb6NMLjDbeOnRna734mGXqoT4d00Lbkfky2KVcFs/WWJ09UxbIr19L/v5eb3J3Y39VNfWXXZJ66to4CzRZBFUe4nLixIK87K/HsYkpTf6uo9lhF8Zn6p+bl9BBJ9zva356tQQpiuaEbBknfWHNYC0GIMgmxWr7o2dBsC5Qhm1boxLykaHZJv8qJzUnF2gMjE783dup+3NzMTO5GC4cQQuwQHx7CADE9zgILUoFXGd0On8NL0TEfMgRJW1VEeKIoCrgUGvqKT4P1CVl7gdlvBtKLNTBYhoK2BIULavsB0AXMEbEZgf+/4xbhshisAZ2GL9AOjxlSAQ7ibbVQn5/TjhP84+S85peuSAQV4++jhT/6jH/
ze6nCOQ6UAEAAMjD0SOE3qh4FVoWcBzr5vy41fOAo3E20UtbHzfmRjsVF09i9Wk2dbIDO7uPkvVQaXvNV+lxbUXXw8jLcjeF8QyKn9KS/AWOuvBbybjZRm+vb1xQVd4ub10ycOvD3Nsax9Cc7tAuq/Kmbu7E/M4jkegzhD1u7NukBavFUf83dxgZrNr+sN6LhZQTWYgRNnqlCx9mgzqltltEwMgJhIGEzit87aPubJe4awaDuxRQtPgpqqSvSVOowjliKXqrt5RFwAC8qnMfBO+D3nqZMcgDIIxGCRwsmDKCsM+d0K2o/VCvT+O2a4492gwkAahAHNiqjPTPMw0YDZ9u99UfVdVgzvNwvjSQ1W1JK/B2Ux3vUgvpGgGYMnvvNLXzgLToobMFDp1fQAZvkI4Di65N5HHG99Q1axJiAWc1IvZ1jsWDQaA0kiohmxDlU3AmXkxG7wvsJEn1pNByfNIbNs2RMwDyaIJNbynfVrM/VS7aYm/R80nV0VQGMBEZDhrRFdOEWArqOiTJzgYchPCBqu87o7C+5noBcom7ImOk3ynvEcxfum0d4fgVIa8SoPWcEGfzfXycgMzMxGIs2ylEUqd5FSVQpMADBjM5lmcWwqZu8M4N6UV0F609e3nUXK2wixQChRZ4mvj7BdmREQvHjiunH6idXu/kxS6zr25itlrKXYMm8cYsh2qCNjlxFWhz2ior5O6pp72Y4mqL/sn9LemqzlpmsIDGxM23SJZvEIooWIRTQBmkHRDxLmkbbuP27bpTabetvs2NhHRm729379+PL7+u//169evEQ+MRImPj49p9v7+A8lmdkyDq083Iygwq419c7w1ddCr2EydXYOlmynCICwf3Hae5cr6m269NeuauztFb3RTYXNid1e3jpGqu/ksLQpBZrPMEvegRPKK9Wll2Hjp902oRQBbeuig5dCJI5n6ZTPetqNiXjV2KhMm/bVcsIr1qElV6Z3HkP0UoO4rwvBKQnxNfew1j5rtzyT98vPKsWuCaNdys/2VdF8G0L82S0FXSiwPX1WHnGClsp22mRqIXDIuVC7RdAKh1f11AFFO45n93PTB4bovDK2Xoa7/9ooFlax/7T+tVlwrrysLfWE+5wM/LZQfhUS1fiVUe7fm4c2X3C8plBcJAnecEb8LPSzu2JU4XoYUU77ect7Dldy7PN9VlT9tPa7TqfF/etdyVQ/6pn8j6bwJdxV3VzhVo6nT3o7U8HKrrYSXi0bEzId6eBB78DERAfmyxa1OuBNVlyO6tBkETEEEUyJ2PfyY7pvZD0QEEpENPMADGJkqCPJMDuHTHfEddJZ15T8bh9dzDaUE9laGApOggAEKn3DH/ntihtywf9NdZRPCIcKU2YwCDEABiSChhKONqCy0zoGk4oJUw/jPFjB+/ypTJzLKDJA5dzM7Y1sE50vc/p/PVXgwxWXI4KG2FgAYmDy7SiMheULVRN3gAjZXuKOadS0O8TMS+AL9QHQG55uRcmmG62WENIjKaCyJRdnRqh4C0Fpqh4UV99XSZ71n5TlUGm95YRLVKQAXUbV4LCCSziEIPjb6lLp7lU84e5WEOTDKSazGfMKFtws2blvPMxGhrAgqm61fuUqR9DzVbzklApExsdAQGtPNFG6k5ZGdhxHm8/n89vUxs9RtQ5hbRmnUGxnCkpzuPoZ2DCRWquYRJfpl/kXik0QLclebj8eHe5YyRmuyjwcij2jOXSQdA+4KcCRhhqblU20IEQ/O4kAn0loxMzvmfB576FtmLjAVZYkAg/GNpttUV1Vz2tWOeezHfM7D3aeb1h5VF/YiqXKfBm0UiaTrurfbzJiJar+8CCvSxnDCNL2qI02OZmrmoxoZS3ewZB5jPB6PQCaMsEh8X1XLwGD3gPsbZjaniTgh0yM7J1lVQR4Yla7TWVDmelJL6R+9qlGTU5g055hrInDX1rZFZD73ReFjQxqoU2cMhoCAq83EVEW7SjquQkTCHBbycRzRUK6XjojcDWoYOR4RCQdqalplrZk7gxpCJrfG3d23Stlt/wgzb9v21BbpsUN4vThKWsNFllpBca+8hdpc5GIueGV/fbUi9cKS8s8GCGd2hyWToEAKoeh7glYWcTrVzufTmfJ6vtGW63xXDs8BzDm3bfS+hGUYfGaMYUQ6MzjTT0MH8Wr6RKSzuPMSYYjnvKTHXBWdUneo1RZyd62BM533m9mccx5KYOkiJWR2K4votAgub9v2fD7HGJHcTs9D1UXkOHTff4kDYgsCMxHt+xxj68pSINohOlV/UYIMuTmX0wTkcJtp2i3Ly1jcTIujMEjtVNBnPB+RjAC30Jsp8oUzQbCAdfc53eahESbqBF1zdzIFaM6p6nEGKrWNt+1eUrPCYhRlBQbc103nrPE7xbl1//Oit0JMNeJT45xzOhNRZhEUOXDzwKaEWO0wpM0sKn687P8hSeGreKbFE9yWYT5qDYPUhWutWn+IsgvorBM++Vt/TsulOM9RP4eIQtHvX/uvpqdycz7QPSrVQeRlGweYVdpgQSdFJM15cWUgVFX76wlaB4aFOcQn5qu2dPKZFwfT4iZ7faz7d3bwJOnl3etQPYivD8ApUgEgDLx1AJEsSstqoDyt/a6VzZ1Pu3Izv9Yoov71NRK7XMwMr/YVXq44eEiuZcze4nK9/KWEqNf5U0qI6nGomptc/BeuqFkv8q6nECBdWBPKej2ZLYoy3GGuSygyO1dXC7l2xwAYw4BAMovMUa5e9CGyFJ7lTASHHYRfiRg83AfJBt/AAyQAOxgooJqSnH7uURTd969xbP9E/JBQnlU4JmCMCUxgdz/InvAj0jzgT/32IYc8j32fz3Ef27bx9p4OLAxACQOIGRBkQwS1nFMddw7DIdZ46U/4T3AJMSxAtifcIzwYwgVD/nmijL4WClaPtDT/KA4FMSSU5b7Twv/lGV45LWyiU9R9T//xgujzCFSYcWV7aVUKfeLbxhVsXEWDL3eGaBERGUMrv8wXg/Dlu1xYlaGfdNisXTNxbFfetbLBeq2Z5Sg44FjCQKJysuIMg1wQseJxG4uUGGg9O4VQhSbzlQZTl3GqsDFZrzQkqvbKdMYfSELUgaodMzFlWULP/DmP6fZ4Ph7HTkQKcgUzO7GRhgpw27Y8nw44JKqJxoY5PZvQcuQNx1vmnIGLxYxo/2Omc+5hTMvgqcfUI9bn8fw45h5+btooATvoZHzubqZOyuMM8gSzVqTqsO/7t2/fbmNLaUHOGXwjAB8fj2POQ80UCj+m7ft+HHPaDDG8bgpREGFzZHidiiTNqyRIIqMECGOOGkJveqoQDdqsEpFtE9UT/dXdOzW5R6Kaua4dPQAQFk+ewlNZh0j+LCLu2hIw973BzoFM983s7EsFhVx1o2VBTnHvfnrxUy0uDXLWqCjB3GilhzAZWpipqkCa4M1MzEV0jOHEGYT0ZccB8oBn0zmnMBO5mW1lPEdGXAtRT1fWlWXU9Zk3EdHpSW0/ceujn75i4eoDVkQsFDOQa+ZVem7JVhW213PlLOuAp4PUuaFHDcSuBFcbLE2QzQVX
Gg6SazrJG07FKaPTzSKb2rEAwJqZ2WnbHIc1beiccDtUVXXbtk4ZdSzxT8qUUbxkytGqap7rVq7/9GFHcDK8QuaZmsjMwkn5+0Ft+5XzBfOwjG7RiJeLiOWJDicou/uhNucuIsdxmCX0kYgIkao+w9dIxuzHMefUlg3Cm85dp8PPIsw5w6JKkVamVrIRZnLhTUYscmRamtnHt2fdwMwszKGZ7PtuBrPw3sOzuTwf6lLgbM9DH48nNnO/jTHmke1GiCiDuuFDIGIenrhcqNq0CpjnDx4eO4WqwyqCoarHoY/Hw13iBPiF9XEw+ZWiABzHQSi3zKLuAr6aV8xcnrLulkSFDFX/Ftrw+cpimLak3PeRuW2388Qt4t+W3NQr2X2CdfkHr+v0v/P5edAWd2H/Kcq0qM4t6s4YoRBBjYi9IsZ9ZKicjJdj3oHE+DmIs8opUfe3FQTAzV8W4TLaxUmPctxgEQT9r2VLrstSnwf8yo5y/OvIl8XJPYKv6YaJe3Z9xPma+FYRGSrklTeW6uXuPd9eAXyHBs5rVUaXWXsaqO3rX9xbl/nXxz27elFy1yqdjUCCdiVgfO7hMjmZBq8qadIwLqva5JWjLbD6QNbsgzYLdCeuOmtMmCArTJjU9QH2CANZ1ja6AZhkRPJBRAyBC3CHC2wDCeRGxFlii2wA4yDH9sm4il9fTME/ZRqZQ4E5/cF+EJ4+H7AnYxJFfYf+/u//g9m8f3nb3rdBsDn1UNnugOSrPRotMohdn+7uGERMWVp0I4J75T1W23r6R+O9MMARqqy0pjNLR/ifqzUY1xrO5So9MIDhljAz9asDK5Z4X5e0r8z5up67yFh0p66kD29rpEGF5g+Ps/NCQyLiDLbTuotbtvs9OVilzITmFvXeMbUWLkQZUTzFzZI46p88g6hSI3ePdotdo66JKxmMqP0hSHkfC8BdQBILEWkeEBEh8yhCZwhRlvo2IwixwsROEtbRwmxONlpcqSUKkk2m7GAqtKZgHHTx3wHxmoI5DaSNWKBPL8VqJPQGMI9A5SREWvoJ1RWVl1SWVV9jSziBfd+fzw8RMZvHEca+JSvhSFPMJHsApnpNX6YxhscmadrDz+fTfL6//dDuSaLJnIlM+thn6LVgkKjqYXqYavUMjTkF6HM0CiuyDg+fs4MAi3Q4awo/73m5qCIzRbIauGFl4mr9fPF8r6k+MS+13fmih1nl8DURM3O4rtuvT0So4qL2cGzbdtizpUKurch3hXg+XzKB2UxbbvW+NHn01+rX2JzA8kqgiJbccUXkENRhjTNlLg5xrMa2beo2xohmkgNg5kgNd89G1z3uJk6u2fWoVrcQLQe7PQ5kbsd0bLUUeUDUTl9AHRvCWWdCALq8LXBl4gkZQvZTOQCAa4lUj/xcQCsfRLmW6xC95ji9UF3QGIKf1FjtiqHnSwil+d3nHfSKBOJqQpvZnKbRFac4UCsuvqzDOSm//NrXSi19UTatUYCRNbSXPIie9TKeCWCM25cvP0bPdyE2s+cxI5XRmWXcbrc325+Px+OHL7+53951+oEDwNQZofXb7W3mZaVQjjFYhnP0kjIX4dodAjCzZt7bX+PuXpgoIjJoC+9M93C7jy3mWNWb6UxpEg1a6skaLvN192kKJiEGeM75eDwIcP+yHgEAiQPuPsYtSnAj1B+sYyV7EAkLMxzq2RuGm0LW7A98z2vengIi6iAwMw/ZnCnStGJv40+quT68dIBAYSDHb94p3yJWhdO+ZDBSuT6twLf7OZ1K0KS7Et564pLknBDw0uFsiiYZncGI+Kt7H5Dq6977gpJHL7p7fM4uhGwFQUSpBa6u4YVT9XRAjKrlZmZiDumO6+BRdILvmXProX75sD9/WShcj6S7o0FZApDtwiVOgOWgcyAQJs7lXqgXafFe3XPEZ+smz/gx4u7Ycb7a1Zfv5pQ/T3FlOPl7q4lhbDeIy3ejfOuLks2pLtYggMsq+SeGHKf79JWcBmSd6wBBJUO7z4j4uhEru+tFmIWrwatzpS7tdhS1Pr70LettcjcRoTFsftSQxYnIxJlciZndwInDR2gYF/mAg4xBm9NBvoEEtEEi5saAAEfUbuZjYwfCzsRLBLV/68B1xn0zKQYGGMEANd/JHoyHyITvsAPH/vH3v/z827/94Tdffvzyk9zEWA0k4kD/F/pwNkInadRjqhsU4AhSr+flvzZU2Pd7/3YczxErv3bRdYL9s0MZXY2Z+NXhmRYFQaY6hTaSqpSf4YnL8QTgC6wDYjMkndvu0b496Xyl2Pg3UjCTCuIULKmeVbh+4V0ANBvv4UUoLDt70XNW0XDOeeGrviDQEJHqeahXbl8qd8NhRESAhhWknie+Qg6OSBryMQzCdAlXXxS+vqCTlPyqy6oezPf266sqUzWuuP6b31WLAjWPPFXLB+YkJZkKmHjI7e2e2lhe3NOec8ahLc99KvRbrR1RtP6MKuQz5uPu7ko0mDEGW6VgMp/mdVg0tTFd3D8BDOJdo4PzlnBtANLln/saiRCq6rsK38uFHBxck9cwmdk0OCmTaymq665HvitR4YU2HZdNCACZjU8h4dzDTg4CRYDoZLu4gv/6LGDC5O5PulNIHyQzQ3nK7UoAZgZXcl81oTKyTppm5q5HB+DQBTvgcmhDKAONo2Z6zKkFCk8XyURExNGUQloR7L0OpMQeCkf52+KPacoJw5IWLd8LPDADoXxaLFiUmD7DOUedHQFIOrQ8cal0luWzbkSL9paUPTYnr43G+uqV6QQ5IbtsXT5WuFzdBF0LR+WX7WVfn5k75YyzlrhZRwxg9QX7qgskUbmZVfyETjfHORLPSph1vswMiC5VvrFcYbE0H+RCQQg+fPIxSLg/VbUkR/L09KgdNQa8DuayynkQagynh+L800Ln5nTCG4bFEjl7+77bVCL65eu3b18fcXy68Uk0h9j3PXLUw54MA1LnU0S2cU9LqV50v70hcH29KgecRDai2c4LIAFImQmsXGlaY9wGS1QJhgNijMFpbXJ8a8hWy56nz3LuAqJuCu9GbqREkbrP7OY2TQ+d20Snpzbl91aGXyZJqNZ5oXwXkTF4DHHQxBQXERbJHkKoHmjzuGDntInbxyr2nCrVVliMEAahn17LV/9Cf3dlj30qsZR6NTdoqdGtC5fJUkdCmnT7uL38m9/S07/bZmprAOsJzeUlWYfaPzRqdJ/HlGt12npIONlaMNgsD4jvsHQiWd3PHMBHmbkbz194e9G2IwC7PlX3LarpOeyVP6Bss75hVafqh1oW6t6Gi10dttmVIYYpXV9nAOrOnzxQ6za1OtrkoXW6eknxEoZd6mu8nG5t0mN5B5b9aq2w/10EXE5wGecpiV7WMMdwJZXXVy/Mf86Dymzrz8MRRQUpl/UysfJUCcPLY1ugfPZ7olYj6d8yTthPEJD1aWNmmk6AB74PByyxAeYgiDtxmnnpA2V/pOcEChXwhNwCHB8YMVwDA+Q5hffcGnjZlLRomBfbIz5wgOh0fzqcMJkwddfj6+BDhgLP/ePnj19/+Q//83/42D/+4i/
+7/J+hx8M58FggU8QAROe4FkZdbcPgjALEUAMN6gZlCgYsoMIXd/2X3t5qYtuRtP2feesrOGwgPK8/PMzB4uwwxnk7o78+R9aEnNvhMFYQ2ZGYY2cQalU16kC+9VbAnlDOL/qQfnpOrKWHapK2ecrvxvXvqek7pKr1meQXz+L2H1xlFslaTYz6a9jOb8tXqNzDAB3NVORLfhcy8F47NCyAE/d5ROCDX26zuUo/TUKkszMfSmaBFT1dsvGNcFHtsGvTAFwJzNFSY6Wsr0QAcSXJT+UTKIkwXfG5ukkveS2ubsnqEzz/bwKKMXnnGH7hWLxPJ6qB9F4e/vh/f09enZt25Z9PwqV23zCYlKxkrhFY4A1g2hZzxJgpmo1Tlab0SPJyW632zzsMPVwVAHxt3L3cdd5M7P7K2gBeTiTAm6p505u5OQR7mW4zRniPsz7ypl0d+4Wz6G/mmV/tvMVZ2GMuztTqpXmFMD3y5orSvmDX7ajqSsUSxEpYzUNNgu7ONXryTxCBJdR55kYqDPyCeNYiggzRTmi2jHnZIrGX6fssQLRLveBN2vosKcv9YcRkSbLx1qCY6cSBcriw1DftTpBr8eSmaNA1K9XLyYzq5+Ls5JoGBIRRGJmsmQirr66Bl+oetW0XhSsviY84EybuVQN9KpdvXigQ5YzXfkjgAZDM5DbRBIswv/UR9TdPeLorZxdH+7uZj7nFM5EVj/BJy6winH7cRxjyBr7QpL0kq1HxJR4+lIoo+suu7tbsr6onKqRGJejsAe5Gp9eZkwXzPASDGc+ixy+vP/o9nXf959//llVf/311+O5u9Ohc3/OH3748Xnot8cH82DZssBmIY/kVI62gYNJcnUKCYT3Mudunsg02+Px8DPYkK53ZuaR9BlwLBHK67WNze/1MbNp7Qirra9d9atmqVnskry9ma5VK8h45iCmyBj0FiVeuuXJZ6plgjv0Nt6o9PsejOlJwJGC3iNbqT0ydaMkWC1tbHc3dSMQCZXHE4A73t/frrr1iceD4vxXI1Ne3lj0fyJjR/1hHqhr8nbT4Qs/7yERuA3p5dBdfDdYNsI/ma99/wsfOL9IxIAthk6FcdjdO9UTZhRhQM30HA3u6k7uUXcRg+tXE3CmlNSp/84wlg/6r+1hiV+zsW3TYRcO1IFFp/XW+gNXkxKlcC1u1nbqC58iwJfM0jbSkubPRIzvq5+9QSeXi6h8GofF//sLAbqzrA/FQ7zCDleGHAdvXcM4PiyCUnRW3xl329jr81sqvUzG3cPVu1zLbp5KYLknTMHExQ97qeOvcXB8ccJu4+YVKoiTJdu4FoieUxhzIvw27sAAotEWgdicCAQnMiLi0BFhDj/SGxocmBiYBIIdDnMWN3aO9EvtKX86wrGAL0cy/aefrihU02N+7I+vd97HHTY/fv7D3//ut3/3+9/+4dD57euvf/H4htuG+w1we37QtnkqkOIWvFMB/0+/+9sxbvfb+/3243ZjJgFHMUdvEIjwT2ETqgNmJIF67cek6SrttYUIWP7zD/r/7eWfvBt1oFIkLZ8Tsij1cidQvQgXqLC4Fj5JVAZhmI5zzk4TPfUVMwYD5zlqQlLVcKbRwpCJ6O3tLY5eJOfb9NcePDijZVg0Q9SxRcmLlkqrhtlyxxT9dTPr8qnqDEzBjYZNH3KfcycSdkBtyGY6o83LJjIY85n9i479oWb3+91JWGRz2g9Vg4wbZICIZZtzfuyPQ93Bh/oPb+/j9g4WsI2bg33Xx/RjexOzQ50gwrKRG0fOCd/NpprxENnYaRoOEiNWj95CHKkKOGwe8yBFpgUTs89IThCo2mFmTkblZxrENxnQww1M2zbkOCzQksMwmHtidQwRuEdZ5ybj+fGY04RvH7fpLCw/gN6n2lSaCgxmupkebnOM8VDXTB5DpLMe82m6s4hPBTkzz0O/OcYYQ+7HsaumBdIGsBHGbUzfjyPsCgBwVTsOkptb5HCQWeyzgikSVCOOZ2aGNGMM4hOInhRE042chHiqbyOawpE7nnO/CYNI5RjE5lNJb2OL7CQRMXMzTFUi2u4jifi2qadNFWrecRxR1XWjjaDmNg9joe3+NsZg4huLHyaGQ+euKmzw5xBe0i+hao+nb9u4bbfjeIgPFtdBxzFVJwDVCWzTtGuYZBOwquqN36bp4/Fhpl++bGR0TDdn2cZ0G7cbBT6N+z4/Nt7etx/nsQuzEGAHk7wNtvuXr18P49vHJPcxnZzww/t97t8O3nbYjdRFd36aKJzmDnZhHvvO7+9yv78/Hg8WM9MNG8+dyebc1WXcNz3UCfftbioKutEgp2PqY+rufvPguewsLsNlmCoOPfRxv93uRuOwcR/OeLgfGU9vTSPV6HLFk4WrbFG8pKv4Fsbki50cdwZCNgAy0iyVibNCzXTaHFjZq/KU6L9iADDGcCefyrKZuRptcqMNUYMj24BrgpA4IvrQaSrhA2MBb3w8v02nL7cvbjvBmdxtAgCPgFPdtmGqksa8EdG+7zcZu+4f+4fINm5iZofu4RU7dIIH0zB3UwvMDBnVkTNYqoGIQtmPM0dlJ7PAGcxEuBOpm3n2DoYRppuAnOSxTwP7uM05bbf39/uv376pkT/sl+cfQ67oRBSE8Pb2dXd3ut1/oOo/PljmfgyWEG63twT5fDweEWMMmaTuM0ysmR5HZt5pv91ut9uNmd/f39ucq7xQQvZaCr0wnZRzzmN/EgVStBuR3MhMn/Opfgg2O3uKTmYeuYbR0C/9J+50H/c5p9BHaIBvg+d+CH2JRhuP568kEzJ3PIbf1NxZ+Pbu9PHYn0NuCEWTiYmiSZeQj7Hdt8Hk87kT+U0GJm1Et7GZzZ+f+1/6D/b2xY9tRhLBNoycxJ7+6+03bLcn5v14Mvx2HOpQEUrPjgwzVjV3I4IIsYDIdZ9cObFQo1B8nQmkdrg7gQTl9yHmcbPMDvUwrc3gicIqRFB11YPKJSyDbQFdC1uslWakU+mEZGMRLpN1mrrO8+Qu2d2ny0zzcKJansb9uqsvzc0HMShq/JJ9MAUekLlbdD5L/NcJIsrCjZFudXcHsXARVXQsDNtDs9VlMqlOOVa1RflO1YpPm62mLOv6RAU6M3W4NY0ZPYhobIIThfWsVwSRzbkahG08D7mtVUBg6JxqVX9b2l4yOmY7a3twGjbmY5yRQKBQXoiqJVoZk/EwRxU0+mpXkMMLeGztDwRAxkhI0vJ2sQgx2/MJZsDboAIRzG1OTg2UhEgVqupqCetOFIEQJzXOSpaYT4KuVXmkQOywG28GmJvQmKpMg8SZhUiCpDyaWxHdKMYJgxOdrrGU7hG1Cwqi0+qTpX8VR3AvirGnahvMZDp3v917O0RkhLhUMyjYGQwJmBAQCEJ+3ACEuxojkvrM6IBPJyYTMLFLObOI/ffhgHcaDgY2AwPbkZG7EealI60NAoTg/iT9VcYT+Ir5M/AB/Xj79st2HENuvssvP//62//48fvf27ddiXge7k+QbJg36M6HwYm+/QFhd6kCph8ff//3fw/+6ce/+Kv3dwc5jg/IG3iQMeROIO
SxClpSdQX9ZcYkQOphsQTI6ZmTuvq3JsYGyMQbNvavkK+g336zP4j81Y4vjJ83+SoP+1HesTNuG67XJ3Ttf+zFfzoIWUaSRKFW6CdMR9aLQbM7E1Hh1qZWTAim6gJS/cYgYgIs2pTFWVObwXgHMdgiWwDADK2GIpOYneLMO9Ogys3RY7o7UQBk2nJyM3rPZ6keGRkVVjYcQPbIReghDubsLx1ss5W31O7OCNxpE65mYTuqgm2yZ3iffdQXRSCY0aghUuosSpeZaERzwy4xRABgHkd2Myxs6xiimnmlx4R/+nnsZvPxeNzpLQQlM2/bFhmPbY+u3vQY+vP54WfJB81Z6UyVvhhpUCEJpFo2m5krhJ2ZB7HyUNtbqPhytYxs6RILN5Ylq+4Pc9/39/d3EZFB/ai4bd+fZiYyPLQ3p+g4174yzkJq6Y4anHGzdsupmfFSa9eD6fH04C0a3QJRgRaf78fRQ6oP2zPaKRYpdlYp4tH3zyrgXRczPZ9POA/J/F0zDgt2owSBj9R4zvvLrRj/142VzLNUd7ExUpcuiVILUiS0oEcyn6AytiK5N4AEECAQ/a1YpaAa8RFJgFQe4qBeIR5jRCVRehbH0Ojf4e44z1Lotdu2OaYeu5ne74NAx3y6u6k6AtLIVVXntMr98zK0CFKQVBkjFZEoJPDLFYFcrRW9oK5x1d70HF9YIRV2hSyQxJwIvaerGFfnU1HIGTRYecpFD1uKiPrO9bZ1JFjUqSbLU4ezLCSztKbMnRAJ1QsvizVx/344hZYxF8WeieUvdxIFmr1Vmv75Xa5YWFCgXQtpOgBFwliq1PoV3jdVX7U+qgCi0LSadnovZqpcy+AdHkyVmR+Pjw5khTIdGMu1ItbPWc87FsQOqiau64FqOnsbSySW2SsBNTyR005mnotQfep8iXyKyJzTAL52Y4OzEYygcCZSRJg4m/4KE7GQyZwTpsxsBCEmyk6Ptlwi0g0bzvHDI3bHS+60Q+ecmV7grm4+iWBc+HBTj6FWbs4QGkclUMgqhlQ13G7LGQl3Cs05nUIAcEa4KwUj8i9eCaN+bYnW24RF0rVEeDwefX7ju2HS3DfpwRT10gsNRGgubuukhpfjoAXS1hRu2YJ8A0DskbXdJNQsNwYf7ZaIMrV2GQyoVmRdATJ3cphRycHVAAv6QdU45aOCUZw22imSAFA2GQdVXAspTbQXGcWaorCz2Xuvv+sF5KyjeViYxivDqezWZpW9cfHFdg/lVlaqbfyVyyDsXKRc0mtYD0RYEA3gjuvI+wyug1uH6mYvESB3J3ceA8zkPuLmgu1pWYLlXPcGrPTf3LuW9/JeGU0nF/pf2ez6eW3sSUPxpwYORakWKT+X57xO+ZMygGjt1dtdy+gWAe0QYQjZHAvaqbuqWoSMs4ABXSyLbq8CqIM9An0UQ1YOCxMztR3nrC9VI3EiwyD4xPFII+vjV91V1aH27evjt7/949/97R/+9m//dh4f7jrk/vFtfvny5c/+7M++/PC+vb/jlx0//hmAP/77f//v/t2/JaK//uu//umnv3rotskNxBk6ZoEMyACkiCW22gASGtBZWfg+QA5MN3MEXvNqECrM4QwRYDAYB/wAdn38uj+/8m+YTFVnxJB4CIhh/mKv/dNag/+0F9FrzNTRdoEDhFMvZo0Ic9zmZz/VEqRMldgSIAv+qbQKfS5KRqOzri5i4kxHYsc2pMVJC0f/lKS5ShxakDuaL1HlcrfoiX91n2F3xkQaU8MsOqyd4HzxwNGpaCIyKJm7oarOPqmARGQLi2FmVZ9zSnikqvWAoMBLb4POi8OMFJFffvklShctnFI+Q4Ok2jNDpoyGhbltmy6Z8fk4TxykNi14SaJVVVQuxEIl4e7XxIOp1VE7WtDG0lOBrBBRhKHMTC0bGQOoFihAmoVhnNQzqwwjjYQalburu1R+FFgA0uhjaq6ZA+lP3yMkY+6qp0DSqfXqXIfiaXzKayby0zCgBBpI8RS+ScsSRDiQQl1NVYfLSuhUCs2J2BH2lWfNd4WP0s1Mp0LsfoUF0rNT5Tmw9ciVYn0mqBBRQPWsh6GPT6QqxWGjSmbQBMBA+kQRDoXItj+3+2QZ7mMMM98fGp0IED4VRjj5t02YObuxLcY88UpU5O5UmXjm6wE2XPqhXYSce+WjyshEWT0FXi8OZbHAKRrP1bCTZRARV9PoltmExQxaJO76ihe+tt584ZJ1LfrE+es6jPy6B4hcGmyUZ5OISEpmN3msW8xrH0KisWjAfSct2i0XIgKXehomh4jwiDzkfgVwnVG8vsd82qsE4ESK10IEWZbHUVF9LBZ17NRp9RXNxBP2/Xkuu7u7V1Qz5NDrHnnRaudYLkfAmw9LdVwYpeE01kukoCTlfErxbRiY6YbZbqk0J+xKFcxsRW8rMeR5XAoC+4gl9bub0Ur/IhKFo+tzzO1kwle6AmBLLqvBEnEYdhzHkIBpjdRcUz2YkS0SyznuMNWDKGon1qYOTGBVRXLslXMyldd1Je8QLg5f5+tlsL0Qas+3fQrr3mkDei8HAVeMASJxn0VsF4n83WO7fjh1Z2ZagmP9/H5jfYsAWov98i1cavTLFSf0kz60XJeqSOTsThrm0iXiLACoOqZTZHfywnqU3C1TRj+bEHRmda7zrVN+WeSXRaNz3wmfyA9tgfQbv8cSz/ubi9TP6xrKnx7G53khGBro5U/unlm7PQA/xfy6Zz2p6Hy1bFaSWb+sqctrR/K716FKqR39aTB6KqN6rRp/mWB8xVJ7vrz380FYJ2tmfDtVKQ40I3diYiandA17P5AIQ8iM3JCx9wEmIag7XEFwF0CJpJhvdW52gCWSDB0QDIcx4G4Moki5AoCJYwd2+BP6FfMJMTD5w/7wu58f346Pb/t/+rvff/318e3b/vu///j56+/n3H/+ef7bf/t327b9+Z//+Z//xU9/8Rd/8Zvf/OY49v/4H//j3/7t37693f7Vv/pXHx/0fH79F3/9rwYP2IDZ49svj+cfnrs+jukYYBljjNt2v9/v99vb2xuPgcBTZQbFvHgjbATzI1bEYu0Bw3S448cBG/Qr8AR9QJ/Ht9/b8W2jL+4d/gmYMX5Ji/5nYg3+KTYItADvWtBrfjhAJaNlRHdKhxqBnM/elBy+qgLojlSCOdW/+1Y/lb9+qS9Aymap9fX9L2yKl/DAC9PoG6jNlvIX89J2gpbmunJj1bm6++M5UUvVC4gSYWN9WZzh0P4PZir0l3XFqeq7wigKPex2u2VdRA2Ou+TRzvm8GAAhJZn5lDtEUdgSAifFLZPDbvdtmqsqOJNt1lL+Hmezs8yJIgsHea+/FnJ9rNcYA2RTE+a4KjtB0dIbNsZwC/FM7gElGn73UgRrUpwpOmoUHCR6Fo9Nt7Hpvu8BDHPGR2J3iLT8iPk53OGmk72dr2FeIsBjiARwgAGzipy3nlwbzIviMc2iipo9VecI8IqbKYxP5xznf0bu5IrINGXnQSPi8QCIhD30MREpXdPPjRhy05k1hJQJhJWscjWH4lKgUmjqEF5DB73X8bmYnXgkf
tp4lrHu6Kt2BGKYuxGNMYYUVTjMqrbN3VUdnwJEIqIGgCJkgVNxyenIYvcu5/90CzWpZ6jB1f0WAYd2owYqRx6Zqs1oWqVzXy554VFKapnkcOlTz8wv4GMAeNHsuaqW+1u29Dn175RPXK7rvC4f5p9qlwOPNjQOrSYykRZuBQT3wsrj7bzobf0uoQsK/PqnVAKWWs1YooSZWSpDynartK6rk9vdp1ty9JIkNV/e92fwnGY1zEyUgf9eQyvKbA7TE/SsTz6/7mamiJJUjwimaZO9XcGH2vJcFyF4dRTFxZP1+UBw8uYnZu7+9etXHtIOQpyGdyaZq6piBhXGIeKISl/W4TTC1z3yFHXhPr+0SjdTRzYXiiBVe5fWffdA7+imtewOYWL2PBvLawMJjCJNwyjfKwmDT9RtPyzMDS3LM2zZPqppvYdqysxYAqqAjY2Dgl4OhF8DU76wbpToCQ9FJ9e4+/1+X7e1bzueHy9E6I7ofLseeYDDJvQq6m7R8HoWrp90xLLpav23j4+UMX+BIlkev2p/ecBBcHRNqfvF29VemHXhsJjKcWr6dfLiB1n4zDq1c++40UQBogihuJ+RNyzlvsgTeqmBj7cInX0q6ZrssC7juZ5L/8bz4miHV0YIzgMDfzVQL9+7bqK726c2G+dolwXpQdqCUhvzPH++GlQ9VFo5Sbu/SyThanbqrK6/uCxIsDCgGuEtX+m/rq/OrW6rH7HepyHq18WPzDVf7MMIMGy+ehki2kNAJHkHHIu7a/XYja4WIMqOHyBzCyxegwlFSSOPOFwAiBSgynsiYBKY4WnAetFQIQQChm9/PI5vzLvbk/3JrNj9288fv/tPP//8+19//7tf/93/8h/nQUPuP//8/Ls/fp1z//nnpNL39/f39/tPP/10v9+n7r/97W9V9X/6n/7Ht7f588+/n3P+8vMO4OMxf/eHP/7x52+PXZ3YMUwB4VBOxhjbJvf7fYzx05/9i5/+/C//5m/+5i//8i/p7QvK5I44OMyE0jbOZcfvCDv2P4AVz19h+vUP/9HnV8I9nc15sAmdTZ5U8s/CGuyrmPl3LndHKUWWaHGR1GIMR5SsRJ3pJ3cMC7ePKU7rZ/2El0wN12rjdB4Hbo9z0fOI9FQB+ZJJt47Z1oSCZUjtMG3psI5hVZJTDg7B9eSWknyWxjRPNrPEnm6lJORHqBeqp67QGklH5FBHVIS2bbvdbjlEpshd90wqsRaNQFq0oVcFsl2p/kNV3ZX5BsCQnf2Y2RfCI1rrlS9aly95UKu8pMUwylcLQGY+HQY6vdphdwRMXft9hYdyjH+/bW9y2wDMafueqaqanDRLMz0bY5K7q9koFJXDtDImAJxJC7aiyhKDQUaATzNVR2YSS/K5edGe21nvfuqjnnU3qRAaXXQX/55wyj+YzTnfPKuVYK5QKZmRBBpNo6dGx20FyY2IyA2WvcdT9xeR49A8irVNIhLdIJrvu3sgdHIV/eeQAuDtSverliDCnHVQr0aCmU3ddU4WBJZ9NRUk6TrgilK6+3EcpLMBPDyaL+1RI1d6aL5XuoosU1XPhdWV1L0qXlbe0TuwIliuqYDNMtbp+Jl7XGocEDpuX+uBl/7Csr+lDvsnJe2a8Pk9a3Bd/F60Hu1nclpeeuoH372n12RdIgAM0s/K01LUf9E86IQJXSM2Vq1BgrnmJ04VCIrexuXBc1c4FC8IY/32jg1a4aGH1ut0kmUv1BriwAIognCm5DVsajRf8cqChlrbtPFF9bNBbe8REd1uN/qUGeHuNAKg5WLzE5G6UQmMXnMiAok7zMMghBgvE7fwx5GfO/WnyAMVwzSL7E1gIfLlu0yFKAt4FAt58UUBNaw0nIytxi+rVzWOoxMyjJs9h2jf96mHZ7Hc2a9yGSpY2nV4ji1eEZplDTUEn5vOF9Oi+I9Efm+7ouJPjXa7CqCwD4vdWZNQU35Tcq3YWRmyzDpSWppaTuBHfM+0aNrjxav7cmBfJpUG20Js5wjHmVBz/kC0OYGi9q+S8wJ5rx798qIQoNQvrX2lNqUAATvXJ9fG9DVUNr2URHZ+77IOF5Sd8A6s6+O1Hy8j7Lesb1znflmx84mGZQpAB+vIPZHvLw8HXjwN69Z85sNE1a+iBhO5A7wNjwNUhmjPl2s8RCf0dvPkz89/4fDfnWYMoz+kUvP6RYth+Z2p5bdAVI4/XLGRXqe8DCBY3HEczerPG8A+p3MgTECRy02UNZ9ORDTgagqDGojHBjLAAOFw4wbKKIWFwIADkyBAhM7V3QmS+txUn1NVx+Pnb19/UdsZ8+PbH4/5QVCoPR6PP/z9119/ef7x94/f/f23j28Kl9/97g/z5ma8H4kj8njuv/3d4/13e7R0Po5ju8n//h9+/rv/9G3fdxHx+TNA87Bvj+djn6Aht/uQ28dzLyYTyoZHq9v3H/7ir//6r//Nv/k3f/M3f/Pnf/7nP/3005//+Z+PH3/Exmm+egi55OlEP2P/OH7+7YZ5fP2FQX/87f+K44NsZ9OICSpoRr2iDPwzMwUJ5OHxBlAd6lNARDyw7kymFpGLT3LMVSMGSOaRd4h0g1GESQNodIzhmkZUEzpdsjkIn0gXXah/6nhC5F3vc07netvLwemJWF1efvD401o1E+JYdYbPdzFzfH1CjSevQeWyHWMcx3GYRr6cbCPDHeViCeUj+mbEz1aScvVleqXNh8BjzMik81JxlsVyAMRgiqpLUlWlaqRYDCxk9HEc6p5ZKDWeMYYVAPcL40g9m8+x5Woik0XNrHq7qbve73fVtQYjpmaRPaUOs/24HTS21vvdwEJGiXlIPEL0U5RQe6hYu6pGTRpXzYyRy5L4e1FZiPzauYGHBOKZVtOOXPMzzzlHGwRC5aoMMdd0Scws5/ofx3HbZIhUmgerHqrqX269p+vwTgGw0CUAt+hHGQHhzKtUVjr1zji3AJyZ55xjjOXcxrkSryZRXj4IIuLqMFZrZUXl2pldWKypXF5PmCYhHsyRB5cBQ79x5V7WPNjM2J14UBrPFlAT8dd4IzMHnn4tDmpU6ZIJwMxVCw90xq5i7YNNZM0I+nBe2Md5Z6V4LH8lcmagqi8/Q4uFhD7B7/rt/poZ34uG19Y3y/6+POTzDVeeZVaJwe4eHQgqh+IkCfPo0aKqZuxXgzDJvme+cMbPPBelSq769/qVjC0vKaOlyqanA4AnmH9MJBTfV+VMZFzVSOsDsm4ZkDnM651VfBRhwE4CTTaCLNtWd2fPelevzorkJ1H1Mnb+dm9i83dJAEjrm4kITFT0+bLUkbhlBjcCmXN6QI/jIBp9j2vr5d/R9uKAtHRZ/0REoEG6dC/UntFJD/0VERFiMFnWAHvB+uR3A9aBTNx9mrJzBEqP4/j69evzcQ8gX51TVVVZ1TXZ6cVvEie01sEIDPdIECEis2mmZiYup1do4UXrEVjPgunhzp69KzxKuoj8OLqfavTLCYuGbmO8HMlaTFxZ31n71xxjPQWr1ffytJV4mp4vpLvELU9zqk6QJRgsfdr5HAEoy7ZK088yNiq8iT778ctp
OBFRga6Ynui1Hj6zcsX3SHCywQsf6z/FuXvhY/Hr2LbVddg/uZ5vWdeTOEOO69ubfby8IFaJ/WIQIlc7clLcu3auD6wnvpcvFz7ld3grTssxOV8+RmaNEkWRTa+A409o7r2C7q0hLHO/8LTA03amM/RqqRusoocckU52iralPVLPIrhN72Wc8Bd+HtdR/dkabyb219B9Cy+Il6rqTlyygIREGMwB4JQCmZ2iT7krMCI9nKJLRQFoalYJRoTQOVKx3EBMIPiEA2o4Dns+53HMX37ndrzdhh7z1z/8/tdffzadHx8fv/7y8bvf//Lt6/GH33/9/e8+Ph5mRn/84/P9L39w9+NgM7vfb3PK4/FNdYqI+TTzY+L5/MO+78/nk5kx/3C/v2/3G5McavvxoT9/0yUdZpOs+LFBQvzVoMfHH373d//P2+39/f2v/uqv/m//5n/4l//yX/75X/x0G2O7iYywIdXn4e7yw6/z8e3442/dj29//GXb7t/++J82mmwT5eMO/gimpVvjP78rM9AJEGAibcXXK1MdKLuJUimo7eZw8tO1RuSHZtxrAUg2M1kdcHS6lSFCFqnFi3ZkoHH2hmkGzp484cVxuZ6FFxFjS+TQyxALVwJwsrJ2zvIiu+KT6PXQxwrL4SIKgLgLb/XwLYVlk7Nikqx0x9GKIwcoAJvN4zi2+y1XpARwrCPLaUwCp+W2DiXs2kCy4UrSC7+vuVI1OIogUiwHVb8p9wWfis/nxwJhESRpxFbNrXuiva0O3fKyZRKRiCTAHWeL52DjZoGwHenoBS/mICIRMiPtTNlw6IY4F+YT5TVYNmsU8p27DjOYwaLIirCERryETBBlAreEyKFL1U2LGWTtIDrhLtAAgHYel1ToZfG6o2sqZIGBjtweqlKWCvZGlvkRShvTjN70vbyU+ToJ/FCEexK6V7OhoOYcTPYb5NV4Lu+4hOaUgoeLMNirfKEKzEDOkRiZWVjsZBkB0LvItm2YYcraGMLZUXCCBxI1ZGxVo1Uc0gEwDWYLO391VxOMmaPc1GAxi14Kd+clzzPorc7Ci7+oNOzMVY5gCItIJ06TXxhEX0TUW7De0Pv4IoBfJPf5w5/wlF/43eUhFz2kY+KxEUFV/Xx3dzWIE10G0JTzWVdYP3kZc0+wP2+aCeQI8rPVGJVeYli5HwrvL7zFBqRq+jJTlCEEen1v/+vtlpJLz9m+mbwLU1vrpX5ysqzbrb9SrpAEfVl3ii7BJQfYPWHN0inqbpqqODMHloy7d5IqEUWeVQOYxzq1TgBkG/Sr++/VzkSekdnUDhSU0hI/yAZj+eoodwmxRMzdXJral5vMi0AQEc5aKDd3iGxDtnk8v3177PskEvdAyWKA3djBcCEM4Rv+xBWnTNMs8Wpe6ptwK82r29HMVmzeXopwJ1ubZLUHDfWDelyuudWZ5wsdujsV9FQ7yKRQ+xeBeFLL50O9zs6vjideIjNYDl0bZsGmr+vzHTcTiyCNEBCRE7x6mwCnmvUymha0oEzHemE77k5O3S/s83n3BVIvCCQZ6AladmEOFMrDeopLE8yHM1/+euWl/UHWmyAC2xffhwYC+tUhRQt/iHY++errkVm24Dsbh9xr96s/xj368Cbuji/vOutiXnh1GeS0nOI4mZ+3AK1iLi9dN5Q9M/JP8jt9pqcq+jojAEAG2P0kyPXOOonpmeWqIR8jMyx6ZXMkCWBB4BH4iYGvk45Cro0mGgQKqsoJBo5CEXdgTIQp6A4yeGR5MaZiOmxi6vF47o/Hvu+3/Tl1x5SPj6+//PHXr1+/fnx8/N1/+u3f//3vvn09fv22//Lz/vPPD/hGfOPt7bG7iDAwp/OAun88pzo/n7/e327v7+/7tJ9/98c59/v9vm38w9tvFL5/7PtzPvYjsL5Etvf3u7szbCcSoSGxgNjModOO/YPoD+6//dv//X/9t//zj1/e/8f/4V9/+fL202++fPnh7SYsg5jBzD/95eHzY//ljyA/fv2Z7l/mx9ebiBigNuecZmAiYRdS+D+7vhPfN/zOnNiXT9bMZ09INAM5RW2Eo7XS+K/Jr3pGXLzSWI5MHT1ZX03ly7PZHc3Xr5Rb5MpqsJwy6tS/umG9uQyZClos3uG4YSyO7PXz/noPJt410v41HT7yz8JEUNVp6mrE0Rcu5cd2v5GwulGpKe6ubuujzeasRvAcNoMm+On6bwkdYqFQGdzVTIh8DDGDs5tZ2IfbtlmZOvEiIYaMg47lvc06z/3gqtGMqow9Xa1pXMXn27apaisfvdwi4v4UEZIRWn6bjpHJsnI8Ol/qRA4QS2AJxHOs+FgRq7PDV4UyZwFXP60LjipKD8BLDle9L5WpzbJXfnpSAKG10SJyjVaBIsJMehxhyo7tvm0bcDagb04txFOPU94kZZmdGcQjA1aQKFlkjJZkLZwAiFA4HZnZNHYNCiWiyKOtrbyKq1rlHtt6qNxTLplFF4bTU7LoQGe+Ydzq03VX+UHu93d1+nj+6q7bducCyGVmq4pBDjxoPxsJLhvkIqJa9qcQICyY6qpqZAZyXPBFE6mMQ0RdfBaBJUfkeIFTS2P+lH/ndtupEvbSiQiZO59mdr/lhUh6m5r7vLx3ZVK9tlFCjLJGVv5FBPZAhUnTV9Jz78ycaLyUBSQ9gDZCTtqzky2uJ/rCka+qZP/pvBzr8LCc7nRmJwUTwhhcIwYVp3SCqdV25/rHZUtmWp84Wji1qkLXKVCYDMEMOg02zsig6hpUOKK4tgHsBONRkaX+a07/GikFMgqWnv5gC4iWNd5S5By/EvFZuuPuQBvVDKZImGn2QtckgsiviVyDYAiHTkY5Osolkjn56aQSAO2Wra3KzHNmFuaZls/pQHRmm4c7M91YaIybuexP1UmEQRhuBBf4CEgFIma6idzc1M9z1EEMIsI8ha7n2cx9ObM9V5pvWOl1F1Zeh0UY9bY2b8n12TNnmJ1ZTuKPBUQ5yLLtLZCOzoxCc1MFsPWWrYeoGxyfls+Lq4UIzkwZ35tzMlNqi5Subyw1YCtULwAepw1QbNY9atu6oL08YjEolAtyZTUSZaU1gR5e27ovusu62vHxyX3yNlzuP46m5CYzAEl+XNVo9TWrA3IykjgF2QOqYqp1f/pM0wOQWKkAzrqOGmV6eSuOCoAWsKheycu3lp/b+ZI0uSApEBGExXM6VHFILEk0KNwqEHk0/4kvOtaX9kh2m76oobnpRNrwDR57itrivO3FhsykBktW35zZyYIGKWPppd2N7ZxUvJQJzNbadpdVh/MzW0rUyljC/wQ3M+NcBHaKamNXNwjxkj8ZrJ4RfTxcyT1LQB04DMec+zGPw459fz73j8e+73/4w68///KH4zh+/fj197//nbr9/PPP//P/598CPJUez/nt2zxy/XTCP75+vd/v27apBey5HWps6oSPx3NGwoj72/sPxP67P/x+f2eiSKwlJ5lQM5u2q0+YO3Rjut/vfN/CZH48HnPO2x5QUvZkfn58+/pH+Q//27/7zY9f/sVf/sWf/dkPP/7w/uXL229++HK/3wfvYtO
+PiCkz92x6WMf9zdh1sPUDjPQDTwI7Ar9Z2gQAsDSRjIugjjm5/tKIUQ0c/R0EAfskFUg5Kz9jpKyEEnB5VdetF4lAgIShLDklcAxo13NgkXskel9HpxXJJR1zH0AW470bas12Ld5g2LYyfnjk8AROI4jXXh1xdezhE9A6uZmIqSq71/eQ597fjwsIcJ5zhmmRSMr5FYsQJq9joNYxnYfm+Noq1T1ELmJCGDbJiKbiHB0inYTkej9ENVcxzyEJJpoPZ/P+/3+fD7nVfbs+x4Pj0nOeXSXgtvtFk3k7/d752WJyO126+KN+FMkJ6hqZC1yZdBxZdLu++5q7+/v3W+dmVns66/fvnz5crtvke39drs/Hg83jx7KPKKB0gxqjVGp2iDyWqvQ6jr8ZQg5WuTCDGCfh9kJvifRQWFR4t1dzW63N/cZ3bNN3fxwJhZ2nRGcc5tmBJa4/9u3b7dNOCo6UvCnjAlfADNTmND7DkBEwmVuALlv9xtHA1ORt7e3Tcaccx4J/Y9UcVKbCVJT1W/fvplIlD+lIyKNq2rW5DwGlcqCMWTf9znn29ubu0d/7W3bnvtuZts2iGiqARgisWvHMaN7O2DHcZgxEdlh7+/vb29vRHwcx8e3p5ndbjeA9+cx9cCcYwwzRDLJ2ATAnAfLbds2QrVhnJP4du5XKQ2hrgVkyPP5DKN33z+Yec55u9/G2Bz6eDyYadtucMhge6RFrapEXqqbBsm9/fAlaB7szhwYwe28OHRGw4N2RKRnczHbzAwaNb3MSxVT86NX3Wj5t9nEGlVozcAaMmp5WohnGs4gGQPgOaewiMgxZxtILpyKOOCuASE8xphz2jx5egwjWvAFE9j3/TiOb/6t8TkosyBcmM3s/f2diG632x/+cMRCAdi2TY95+/JmH49t26BmHihhU52IBxM7k6qmBSIjuLS5Rwu1SLSemLT4m7Qy1fd9dz+1T+ZEqxtjYxaiqaqBSJbz4ktHgYYsyhDQAqqE6DEAokoUjOn0XuCqHK/rFv/asoNtvq6bC0DhakqW/q8UR2GZdG4YCREi2YQ8d/ZFIKkeYwxUnEGqpSGRMEGPY1d/t/A56r7v3759e7sz06gRkisOPVxlzsk8otgShZmx70e88TkPADIILOw2d7tv9+dj3t+GGY9xH+ONaDuOnWkbcocPs92N4WLKhBmEqqogOzk/WF3cPbBqAnwRkVp/25j5OI4WCkT0fD658IR6SUPuvOxF/KnRR1czTzXQmvPzXFIyVOP1IK14afCZNYOuH45qdxGCGEuTEr/SQKST9A3MLDSwUNS43fKtDW97xSSQUOa7n1ODmrCkhWNugUPRGlE8gABAqwYsDFNgccnH17mbdIWpkWSZUxAJE7XhkYqWo9fWqirljFgEIvp89hR6ACCCdXtAX88UyoyhRWfqP6HSwnNaRAiIQvdM9/A0uojpzCopbimgbh/STxZmGSPaq64voqsCum59zI63De42p1XrHZhr9WnkwjcKEL/someG04APgzCZWzOTYggSCtI6JDMT5vBDdtTaZtbJBMxVZOW07pffAkUavC5YR6caU0w1Zoli4gABAABJREFUWMf/l7o/65IlSdIDsU9EVM3cI+7NrQu9AEMSywF5SA7ngQ/kGx/Ifw/MDDjTQLOW7urOWjKzsnK5S0S4m6mKCB9EVU3dI4uzEOeg207VzQgPd3M1VdmXT4bN08nByYzTUStkbYIokwgkRmgSXB3ouGUtX92Kw00JIDJnZ85mbqZsDHSIexGCA1p2XZcTSFAqmFANwLvvv9teLqZ6fX5ZkjDz04f333/13e+++urHdz8syyKZPzw9fffD99diRHTda6nAuuRMpWLbrgqXRA51cEq8bRdVddfLpbg7C1BjpJ6/XD6EBX6tSkRqJcCumNkIXvVyveZFElOt9bJv5z2fTqeUpJowl2u0uEc5mmsUSLx/9+4PX339yaePX3z+KTM+//Ttv/yX/7Jey0koQx2uV3//8uQVlGnbNlJlZrUNQEqiWv/xzaUHOoz6EDNxqWmMESGKPnn1qCeZXC3vv4bYnSLfE5LYMEn8SLPEP0GfEZALsZNznigvqmk0BOBgW+IpA9Tn0QPwHu1FDwhSLzIaTGEdomKEw4YfuO97tK+PP4W+OOV1BDTvxFcDO7m1JVLwXkhw6nbkrrUZ69PANFWtrtxD18Majl6LkTGYOdzdWTIzM4lB0asrm5flu1ly95yJmYnVXGPIL6gNpIqOqZS4gad7S7Rw734MU6ljPFAYl5jCbFEyi3vp0348jINOGGOLmwMW2sibus1tTq4DSFmopwsA1Lq7q6OB2glx9MB1+6/2EUBBrUyIk54SX5MqutnDoLxmfESVUdji3uGSuc9DMyIyarqYG9pro/rYriQ0BglGgK0VlsZUD2GRZIaqxZ0oJSFmCvuLnQRmPabJInlZIqYgIpRTG8jhTik1i9y5FVvGMlhiKNbhS4Ttfr1e+5G1EM7giiBuZt62fVgGY2eaZRBA8O4poKhZ6jFnjlNiogZaw232JmLkcZCxMHsvzY1JFRSwar1/6TiUhuEhMe93NhUiZeruRK2w27xKQ5yH9bh4nGPOOeda6tHyqlpqrTzpvynKagGhdKykZ6uGcWD9pA+xFht+jFe6R56Yr3iGu89Sjxwf4uw2cj8+Pl7nOabrGG2r5BNcezMOG/LHECZNNpkbHSEx71myuILBfSp48F6gHjYEAOYD9CXEkjDnnNNySmpq2ErsVXsuB8YowlAATUQGnoq3mRbkc1l1mFU3dl7bhLs856v+xtA+5jrhY41dOmLzTR/cdv3N5/X6xZvXJ2+h7V3ffpp8+0Ze1M6rfdYoZVao1QPxKCiul3YzyFudtnmv0Wr4Im2WXatzJ29h0cP29SkKNm1Um3IOsxieYj+VEo9tJyNmdmLhhYjcVSteni/v333Usgmv2/ViVk4PlNKaRKxWkTXJWsoVQCSrA/mLKAEMJyY+ihQQBoTK5PI1aul0OCq6xz7fHfTdz94N6BtPoJfKj8gLgOGOzhcdGUufN4Q7OCpu00fjS+dbEfmolfCoIobO396Aa2LNcVIIp+k4iM43mCDdp5RcEJK0ZoqD671/vC8FkS3j1vbajvb2eaOYeuz/sd23x4FbXqOp+N7NqEVXj/7GtmD3PsbpEHFDqsS7ZgeJ6LbotD8WxVbech/cyUCSA1tsCDoA1Y/6ruNJ+4aP0zm+Ik58ejO6Ptq3LUwUHbWyROBmEd7LyQO/6f7+1H+4u1KSQ7Z3um3O8E/lMyMiT71kifs1vsjMoi+60UM+yoJmrlnXlW7pELe8NubetzMNdHd3EDk3gHwwiBPcW8MKkTNTG5fdJ4QR4SgfIldLcpLFYIZaoUAWmH/443cf372vpZTr9vH9h5zl7Zs3wvzh43652suLfXh+AtNle3m61l25mNXi1aIEjIpqgRoZmbtp/OhTJmovVzERG5QSWVbfiwGotcYw6pyziEDoei1eHSmZOcGuO6lfmJnlTEQiNloLAcCV3Zjgri/b9cf3H13L6bR888cf/8UXbz
95WP/s7frpw7LvVR1aiSQzCU34TDf/+ad0RcF/sAYTmTNL9yEbRwmotYwa0IO7R5R3Yph+mZlVFZEhZYLUiRmmmGi19W44BrC2u6ObN0zQHgS/s2fu7brJSJg5xfo13j84ZXzdsHJnJiIium1SiH9bhrCBn3KTsOFdjLCiHDM0LPawRXEi6z3Z7t2bOkRq4CyOlc3KzN3NKlEOmz6UpcPCVGoViIB7JE92DhByYgYJCI4IbPt0KtQ7LlqZDfu4czjQu7ZshogQJWa476oa3gszcwuDtLOXxCD3CJazJ5JaRXX3KXsrQrXWrRgACeDQyN83u9fdlYE6RyLd0DqC0nxOuG36d/cY5hauv0UBSLMhWhKApmioOxn1QRTkIbvDsQnt2Sgv0ZgG4aoxKsDUAfWcUs7UawJFRFLilFSVmNjZo0LYht3Apaj7HoYOYopD764REYBtckg6fVrV3RSmYXe3EilmVG3zP6h7HdynQcZo75HvDQcgpQygmAbRLrJGaUmywytum6kgZpGc89rH1gcyLTGzg60PZYvDtY642PmHmZO6R6XrJHGOy5tpchOJ6XhqTS0RETNJF9h3TDszC1qNk3oPZfX7E8hGdy5NvhP1nkmfyoRiOXNH0J+6/FbAAbjL2k0PjiFEbi2Gm9g2EQlDhKwa0CYZzZ8aIm9e8BHq7qtiEDnCJbszULRPELkTL/PGAkgpLctCanvpOY1ejelMI53R0GU6foMRXBWKlKMKi2iy7Vz59YbMphWNXMStJefugcTZsvevkDDGWcxgG/NNxhvmI0MkNqc3DDWgqnPl3qQejl8N4F6m6ybu7gS3mwP12/v3H1Q1SLGT61g/XLpH4VEfHj36Ik0cuUe5D7WZQ8bMg3OjdDGyYoOinGme+Kpq3Ox4NsPp9PDwF59//bsftm17eeZt2zjhctlenq+XyxaDFZiZhXq/ZfNXRYQpoRXa9cR4l5Odc5tMSCn51K1xrO32KA+GfeWboRnBNxWn7ayn8tT+8cbrr+/Mr5BUbkliSN/BIJMd0F2wkDZRLR+bHjeK+whIe/jkcBfbNxJu4+5jwW0ld4/c5Ri1sttWUDroFb0Xgnv/JKZ03AhD/9STApjHUdDwurV12cmxzG5IjRDAfBN3p5TCKDxKWCPsWFqmehw3op/wlsfb2gjeAAbbu/tWQ5bcCjr8ZmbgEC/3++bzZIgenmCWMOEmueQ99xuD0wYjtdIy8GTs3tDn68u7KYhOovyq0iS2jxs3DujvQwBG1nq+SR+9dTiir/d/3xuK5qHFmEHH3N1wBw8Pn0aUHNQDbe190TBNQMMxAHmPpDaNxMPuTxEeMkU1GENref743bff/+Grr58/Pmkp2+X67t07Zjw8PLjaV3/48PHj04eX67ZtVXXT/Vqj+gRqESRQrb6XvXV9axSLFfSok3sTJIC51yOoxE4ELdFp6QAsAFThgOecSUjd1eCqWr1UY+aUExHFXFYKY8CdYYmJBWSoV73sF6slvZRr/cO3v/3jF5+++Rc/e/vPf/YZsIHosm8Pn7yNesqZQuhPUcl/+as7eG1W1J3FYiMNGNhX4fxRG07fqkQBa0l+ayqscWu3LnhC+mBmkk5+3b4KigQZqOFf9w1j+DEsdIQXR9PuwUoi6JUjMbUBU6xqXPipgKPITd4FXTXM3MpzPYII6F6KIhxCZpaOoDggFnSYbr2ulZlD7pFw/K9NFesFjdOtjy8Y9aV+XDrvbFjG7k7kImRtJrKP0hc0P61F1mdxY2ZDoYaqi00cEFXjGpEqrQ5ouKB9c0UEOa9jWnGXqxZeGBB6s5oZpEWXVa/ePIqSczarpew551CmBIroATkc5mrubdJlmLbN67g9pEYZjWqPEuRWDWhOjpAxCM3aGxIO5SRMYxSeIrwAh7o1GzeUYGCvA4hJXej63NQDLYeYzd1dk5tMlcDqpqbetX0QSUg6ERHOwlnIwdQzlh5iOnKPcYJDVQyTNUIRg3y7EQB3Y87cC0hKKeu6jnBIcE4SjJ/HobsRQRITNafdzTQlzZQSZ+ZUSzVzIOZ3HxAj7t7jyE7cnMN2LmARuWzqXmqty5IHSzJzqLaUxwiyGs+i1uc4dYcnPEQ/xgnY4Hxmjsa52G4C+RiHOpG9u+G2kGkWFkFCdutf+W0P8etrmFzz3YjIcES+53/5VYS702Evvo/9vV0YejR6xCYw7/C0/sEXg7yHgTiKjcNpjDDEKCEbbyul1Fyh5vUYYulVSynX63UsBkEe8EjCkid3BPB0Vy8GtSZA5/oKcyOTaV5cJ4cbx4mI+FaTjqfuUpHcR5/ArdtAdrczd2Jt3qJxw/mv4+dq2lGW6OazByEF9jqku4gAWIhzIqKW/jaLnqsgW+5lCxPljIf03hUKgDsuT+uxVLeUUsiY/nZnhwHU581HUeJosI2vph5wdKYG6B+2VPKc87LEPK68pjd/9Vd/eb1u5wdKKRXdZg4ajx9BQ3eP2UZEHEHTeI+IEAtra4ob5uk47ilvPYdsj1LASaNz7+pCtH1FOmfQxkzn1JTusc5xf+pW73CTxoHKhPuFiYtFuNuaXUdA3SnnbAo/2kskbjia48e3ExGIhHpJHrqjGF6ZtKBnt6y6x+j+p2Eub+iWrNkYYwd6ajmejWa+jo8PYTU98iG7+o61Pw36nGPt3pVamgJPmDz8LHJ7/7ZuC4ARv9lqdzcGY0IZ7Q3Sw3cdEKPtpJgRExCmPTEzGe7xa3jqfn8focNWBcogIu0F9IOfR3/6VIIxQl2zxPAemBiBh8HR2svhZuE/nn3WKUQknHSAnE1u/hHubEkFoh4cvWOfcY2SUe5wMsNznpmlbbgZchInM4toa7MezAStNM4IxOQKEoc7tbniTYgg2rnNwQ6v9XJlF1j54bsff/vl7/7ul3/77ocft5eLqlrVy+US5s3Ly0uWL1R1r2UrxQlGrEbF4O7azf1SSnS+hOQC0JbUqgS7A+/m3tHp2LkbRehZmQhWqlUzOz+cuSEKuTvVHuipVYnI9Sgri539uF0WSSwAorffxay8v8omLxfT3dzptGBZpVjdql7LzlqqqreoJcPFIY6OHfyP7hpthN1l6mJhXARRbAAMzjDACOZuDHdXGoEKO5xwP/p7O8nF/SV1Kdf5YnQUd80wyQ2K3rGby8mtg1QFJ06GRDeebww//GkDIPrsOiEdDfDQI24YkqfpcWqFY7PEAJDULcaqHg1dudel3CYuQpZZLeMuY1lhnLXX1RzHsNSipRd/vrJFOlerFtVW9OJEJBj5gWHJtZWEDup9FJFeGyJj/Cn+ZeYYOxEWZLTjqypzJghB4HA7+jf66Cr3Ca9FtURVjLnWuhMyyEYg3MyAPNA+iNxU3QzkDWwexiBhjgxJIOspM3mk5ClLmINe3EeIC4A1VIbYBO01913Htn7/xgmNaJoVIRhzVAhjyAH1kzKDESgd2KQjQhYCWuG1eymqWqh5p20n46w5uqSVssADlIt7QaUA2MreiaS3FYESsUgrmnJ3bXhFQVqZ+oitrsUPUcjMPBXsJSIztGGrU0HXyBKYgRnCm
cG77/tWmXnJnhMTEYyirauHVRr2pLXe3G6KtJjQQavM7N4SvbHqBnBMpGqRZKDeUBd06OVgtvHvzNRDX3Z2wMT2Ns5oMmXU/chumR2KeTbQZyY/JM5Pqd5GQ9Ms9bvX56WOu8322WCWWLP3mBv16/YON/BfZD6sq2H5uXv4hPPhEt34wD61MoZDOGiAp2p7mTKKMjk2sVkKiuTVGAaR2mqlVca14SG9po6nGHnrFUzR3TFsdO91FvNjjq0LmF6A3NWONkhrmYRuQxARKEZpz0d/rxLGEU/7f3z14W0RCOO8uusyxTD76x6vs7dAHrxVxYeOcHc6SpdlCA2goWc1aunePhFZHcR5E2hgSsB+VGo1xueAOkeD29JazVyKaimFKU65PUV1Y8eaxaoBBjKQV90/Pj1/KM/n83o6ndazMcNqFaFlTcywTgBDtR07adY6uBANAsRM0JtizkF7tdbXI2FmypxPynuqJI57tAMxs7cBNjdsYmZEEt7pYOcgqnDwBi+Mj4zG+JneboImcYpHDOL4NBGj13TYcFFuCRgYPtetjWWT9SPNCUOz3alZ2sNLjJe6wTR2Dw61VmGEkVH3QFduYe8ICaOJ4jkK3G/SwjRju8bTDSlEdzs2bgVMh+fu7iMTeP/H2IW7GE/4TnDxm/cjmoqDJNBkWmyFd3NrruFs59XWfXN/6pbo2Lo4xaia4Z4rG+Zg6/JNTGEDDTAM6w6/H2FQdMPXJ6us75XPAQ7uJaMzU4wVMnOdSnAxOeR222Ig0TtFVOxo55n1RQANzPtvZuROXcOOXWv/diKkTkLebB3zhn/LBCOR1hhS1YgAZ2diB3EL++/g5FRxvV6//v03v/hPf/Pll7/5+vffkLZCswhPl1Iul8vlcokFBB4Vp0xJTFkrbRPoWilVd3VzEmdZImEAILp0AqWrlNKBnCjQ8p2jvrVzOjkFkkE4HhoVREgpe2ANOtzYYSDSw9FuG1h2q1QkERGZqQgxG67bY/4cmy1P29vn7fN8WpcTjBT+vF0X3zziZZSEEkPaqMYQ+/9I3cK4XmHMjGT+AJWBA25uBK9uiQnoodvpyYYj9xNPGxVl3SmP7U64wVag0QUzjYzHYZIdHOfTSGrpKOLeLZ9hYOCVAWADAfEW06F9ew+sja8eVuLIzRyyDeiTpsysVTBifKY/rFbTABz3HlUd7qZPoBphBsy4BWPTiEhYbALDGf5b57G97Tt7SjlqFM0sEZgzegoIPcwZQiol27Zt1gpx55xzvE7TDlJ3mmdRZWYESZJiVGC/iRCRSBbJwHM7fDNVFWkBHhGJPs6mV9hjIrHuewtuUSIy5rwI5Zy17tY7rIQoSl9nqzocI3O3vqvNiPJbMd061cmjdrSVEKCZUdMmxL+OPXaHW0HXzfFTKCd36ph+nFpklHNiB01BaHfXUBvcaLSabtsWtmOQ47ZtgfQjOR3cOZ21u5rVqPH3bvHYLYjieDH1/Ukp1bJHHXbOuaoyE8nRhsRTfVHfhKbp+aj8jK1mN1hVJ5Z2hxvb4k9dRAQPmIEGEjh/xKYwattJaQ/fWdoHQ6nRoHwzi1HaTh7lDO5Tr/NtFq5n1yeZYm2M260WbwHpmfLnn19fsyiZP3LnEP7k2+Ytsr68xm50tMbJ7TfPq+LuCLXFT5UYd/7tCBvjCGArurkcZeHUCZ2ZE7ciCDJ0oZGWZaEXoj46Lcb0NVkRcfe2nx7t4GbmNQqVhykW9Ud3vm7I2fs07MxxU8ySRyCp/7GHe4DwBsexzobRHdX95H7e6AwCpgxS+yAdFDI9BTXHoL8ygnCxdcICPVyR8S5M+I2gYwHuHoItYnPBsEH5M+cysZkNPDQGIca5TkM4MMKcnHtEKeKylcUcZS9FdQP0cnl+/8Mz2TnnTCKl7LVegUzkrTOwu4LU0zXMUdneCtI8yoeIHExHh/nN/psZTS7ucQoe6bIjBhe+pvcYB4J4HIyo7h6n3G5uHSYNzvEV43U+xMoRJblb2CCD+HXf9dhnPoSJmc324h0V4TYQEw5MkM3Qnu11myLfQb/ewKwjrNpmAPEE4/kqokF9+uJ0qxbx0v6GmWaIMMyPvuYbq2P+Ex2Fvodvfxzc7UcGSczPCIBGVnZsON34zH0nD28uVhtzAr2HkAZVUHfeZtHHwxkGZpCLvnVtWw45H72s8dfoiS0HlEN78KkJnCItOW7YzXq/CWHc9CPdyflZFLf3RwTBu9vVv9cPbKAjP0ndVGXmljJtIYbYsJEvb68c+0lExDS1QrRNGFZyKdZNI7K4i3W6GMHJgduUegCuBUpABNVSiu22nmS/lq9++9V/+G//+//x//3Xl5ctUcAlLDmvYTJWV86nh3y6fryq6l62Uk1xBbGZlXpIGJBZbWVVTGI9Kusek+1NtRE7RAhCElQmkZcT8lKr1ureqreSSErp5flFJC2SclqQUvFi1c1AKYYLsQOBaBITFkmSqpKLMDlgJMRkZi8KdZPny9uPWc70Rh4psYlbQBPCYzFMC7mQp6Gg/hGnCoEeQJlgGDjIopfUxQ8tWDWCoePD40fJuYksa8mi4IFwiDBOeYgLFyYzdNC1yGzDaiXgaD1oHOSuWmdvCyMbPEC/JkgtRIL9FmlphPKHQzhrTJk6xseoqvbtdIREB081EK3aq1i4eykc49Wm0RaR3aYpARJvjhyFqvZBfOJT6QWNmL3fuOwpJbUynl9EeluXDeNetaSUW9pLhLnpB9yKe3QLUif40yHlhwMdAH2CB3dHbxgbu6PViYjlxmNEzwB4dXclDj3k7p4XuV6NiCRRraWpcCBRMyWmFSIRhzxzdiHWSECbRxjLJ70CgKZsXtuQHk9qkeMWLLyR12Oy9p3stiP5YRTDVSeDfnpntPezuxetLV6bAlDHpbcUMnN1g9leS4QGLvslbhhgpJGJrbU+SEC2gOhm4jyiPpvi6TA/fvCATNBJ0m5iETIJPkkpYZrKFSQhaaDxSkpL54rgblmWkzc9Sr0kNhk89EFsVNyQBXZMh4stbQ71fWqrX+jaPToS3VWEe6O/9Xqam8+iu8fjDuOek3xxZmHmn+z9o6Nfl6hHQ/kwLsmiO+pWfd5R2nzp1Ks2L2kmqkMN37ocry4TpGik5duAAm5F782q9Bi2Nh7kJ9vYh0gZr4RfPUQB9bD0cA7Drwl8KevIruYULc7N9uqGKXUrMMIUjKDDZqZ4x0WIlYzCirFF4VPMyxuP7z3y0jbTG7juIKWp+fTGSB33H5t/dxA0+Vf38mQ60yZSCMNMnGVgFyyIGfX9DRwBi7bVOBzL+Ysittbltva7mRo4EnOtsQJd/Fo8IzMnSabWnY0e32EhiERrW+tOuElKEzFgWksShlutV5CdH1a3y49WvW5mdTklYs2LwInFU6KKtkgzo2MQIgHHTG0iImJ3U1Xu4SpmHs/VyG/qwRgLG/s8s7bf2sTUYWNrrZJy3//233GHcODuzn0o1jummF2dQQZER6lSP+FmKw/6HGQwHhOd9LvpdKgMdtg8Of1uGeFxoyu10GOKAXRMROp2t1extsTSmgm7x9jM
aXAwAT6GXrh+He7n9k2w6ESFNVimBmOEndvkyHJ+1v+uMwM4KNp/OBV9oLQLx5yZqVH5ga4qVNZbuuXMXBvJF8hjd1q3eklfxmZrfc7+oNAR0ZGENmChyGi3+73fb9HkKIy6qqIdDT09PahwsTBIqoqlqtdhWLYBpCYgYGIFTVIpWZewcXAAAhIND19ValQU4vp/XDNx9Pp9O6rkyJQ6xm+76XKlmqu+mliELRqkT29etrzjlyMLPrtl0ulxDS9fX+8nItRVT1vmeiEKvkXGOMIXHPxVmtCkjExBxWJLcDImNgglq0FlUHigA12KuEQKbGasYCwIhmSkDW55KDGey14IYxxpQa4guoQQigVlopPzoYX2e8Bx6btZr14OLBHV06+McCYUHk0StIBK6AVdWMkUGFDAjBg0r0OIXCus+PiAjDEkYGtIb6M/fmwRiriv2+++sWNvO8bt/Z45h5eWYNtaodB8J6QVS7WfSRQIYABv4MB3wrERkQ2BQM6+hwZubWFbZK1MMWISJGc3yUEAg6vu4ENArD4m/xuYlnB+MQUeiB3nbmHp0xj/CoEEKM0RsIsfdzb9vGZFqTVQG1EMIS05rSuixkDazYpIKJx5elZlRRKQi6xhSYyWxdEgBEDpf1dF5PpyXGQGgqpZ4cWYgJDDHEHIX3DKIisoRYpGazwMxIYMpIKSKTsqlKDVbPIdh6DsafX++RcONwjnEX6gR1SHlFCEtaL+fw+lJrfXn5coGnZYncR4cjouSCiNu2+aQZA3TeBwDyAcphGc0MXhVj/fzj53HFOcc35QDfGJo2tbo5pfS9Q9Uy4vSISEhG5HdlXZkf7OD7biaOpA/mFA09kgqKoQ1GbyFXF9eDFLv36Hi27t15wK619VepCEfMZeaLN08KA3dUcb7VnrcAr/mHnqjxUDIiAh4s3zUdD9Tc2bHsYscQgaw9+1iQdiddZDStD2YQtNrv/vjjf/ybv/3tb3/35cvLniWtT7lI3kuu0pND5AmQqmA++9WwiEmRXLWIhbSu56fT6ZRzdsDGIvLl5bqeLz5UtqE0kbGFWuuyLMyRqEBIzrbcgNY6P3Yn31sHfS6ub5aZVcmlZtESiAlQSi2llD2rqhDXUErOzrZ1LyaSQlCiUso8c1VE9pLjtkVlOi01ZzNNKSxLTCnlnG83/vz5MyKu60pMAK22zPGszfVHDzgDoqNRTpT8gEvR5dADO4xg/yEAFeARDBCAoE95B9C34F5+HEMd3DZ+UBCzSnr4LuLxxTfhCbdS/FciUH1rirdtehvZHw87jxA8HOBO8OPn8eBdL4yTAIDrZTpgIExbM2NLAbUbboY+jAHU9kDqR2cmT+DY7buBiWjLeyllL2gDApTgaEhDpP4+GXUXnRAJjPaaA5ISqkJVI0Fzj49oWU7o2T8ARmOkEEJkL8gyIkBkVQUTt/wcu80IjdBUzbCUps5ETBWISEGrtchgOp29pkzBqlqVuhfdSr3dNkf4EhGXTy7NVG6tAqKKmREDETMiM4tqrVnBGOCc4vPTueTn32XTagFJQc0coMad8153gMiMBq11VkQCRBc4nz9/fr1+/fbpo4kgx1+gnv+rx3+u3BQfEUf7m+YA0WatXhQahbz9ZOt5gPafiBjI4HYzo57LaQK8GylO7TRhSj+el+AXpxMdkv9tCH6wxmBnVzf2ePiHvaJqjgQNL+P95wGg230weNB/hj5LfDyRH2H8LtVExKZc5GBnf9DxGEQNBHe2AMYlxeGDCbuea5DEKfoMU7eKicjH0LHDihIyoBGRossEP6cTIQKQeHF1KxhzULjADA5kMvIA2mPA040dT4FTQGvkVVoWZVLzvq3cajsBEbkDsfgCvo+luWrHnifEaeXGNrRMYGAfbOq7wsBemYaOnDCkPA6cvGN5AYAjewUUuegJbWLH7XaTUkvZHXUNHW6Ukw3fD7HVTxq1kL+4HUMK5Li5gFhEgTBQQGY0qbXmWs0kpXQ6X2IKgdhb/1s12u3ruq6KVA20CgDmIrf77iAoInJKS9nz7XZ7fnpa1zWtT7XA/Xa933cR45gSMXILQqvqXrJYCMFEpJQdEICQKRIHQYQ2knFMtVYnXeNoSIBcawEAAWBBi2bGSkYCAJhNTCwEAqN73lU1pVbfa2jM7O10qiqm0FEoiASkixVRoIDAYARGpggN4yZs2zaw1tCdfWQiT1NarVVrobAyM0g1VcREANThXpybtEo8rYMOcToaeetgXXS7BQP6xSY6RN/j7mv5PZEYiMj7WoQha/wEMOlyABi4rLPun9XtLK0a/QOKGqI2n9VGRq5bBv1Wf0Fu9vyMqiIyNYSD5pCMetHBEQToVT7oE2uppf0HiNZIqPbUIwioo16lFJ8v56fz2jEpW6snUXsQNAvMKQRGSkyRU+QgJaPZEiKDqdR8u6lqZAoxLimSQWB8Op/qnpcU1yWuKSwpxBhATRWX6H1jCIAphZOl277BzcD0dDoXlR23QLQwe+1oiiFEQiukdWEK68oGpLa9XhXxEsK+nqjqrlDKDhvRAkWLitaa13V9fr5cr+dcy/V+oz6V3kk2xriuK4QkIg6muqxxXVfwgCwBocOHBfBJWcTY4c9xcmBm0epC8ghaIaoqAc+iFYCG+ejvD4DbmeYPeoa372BTUo4ra2bqYOMAgMg+cQRnQE5kREX0aMVhxR46YuTkzcyM2gg3FRHuUctZfc5aTyY0KURM7P7PW0duEP+b92dwpnfM+AtHo1L2sj0zr20hFnH81LY9RGSEoBpCer3ffvjt7/7hH/7p55+/iMKW6z2/FtFty/tevP3XE7Oq4LrDb4mnkAr3OZPn89nv1lE6ooGID4tyi4QAoEobxxjDAgHM0Kv1BMDhZ6RDrSCBdxKmEBHblHMObeL8vu8LnszM28sdUK3WmnfyL/rtLXF1YBspKh4p7nOJt21jpFKJGaXsIrUji5T7/Z5zflospfT09LTE1iMDR5Sq5fGaedD8qEP+2JQhdDbpKbfDwnsj4hjQjpK8TtITOQ2T5c2+qwjaw/tvGHDQ1bg6jNRijw8ikdYesD5iFo32BsM82BuBSdWsOEDliNf0cx6Ziv6th9t+rymGOeSHtLAIvfnKYIf+S1+rEaf2DSJEIghsVT1m1Ze0P0sRhECBc66OPohMClBVkIgoIAC3+YdArS7XMe9H6h7NEGM0ZDGsCiTme2lmVYEIgZjR1MBRaFCNOXgkKiBWITNjBCZUTv5IIdCAo3PmanZyWlW1Vr3nsiy033YiUoSqUsX2Wra93PP++nIVg1prFQ9JR2gmhGeuzEwDUYwxRk/DtOweABDjkuLHywVqvbzsJlCKsDh+vaOCuPkJIsJMFAIY+iRYDw8EDDnnH3744Q9/+MO3f/FXqvYnihz/G453TuAvDBXU/jGd3vlTxyxLe53zHBzxjLIIwciHNSLzlewuwyGczUbJmJ/hwWSCieahn8cTbDNhW88lDJ2EbnVMQZOZU+Bd7t16rhLxoYJ0cBlMqaY3f+2tOm+PB5RRRCQMhCFwQmAiCCEpFi+gJwwwBoA+hnmsA
+ureoX6EeorpQ1SNBAAv2kRKTYZAW7c++f7krmsAejxYKagYMzRbXBUVPBxK73yBJEc1glNSi9kF9VHQ3Ysbl9Zj041ctaGKICI4VjBBoxeVaD1JGCDghxe5fjwiCUjojUMISHmNuswBORDbI0P90pafkMu893OYtrMkJCplS3VKlL2JSaiUEqx1S6XiweN9qaAKzUFD8wcKS7L4vWfQKBgHjYjolNMqrptm3T/JkSKHJfldDotRFRrvd23bdscxX59/h4picjtvgsYOuDbnl9fX2sVMHg+FwJc1/Of/fovLpdLFvj06dPXr68iomJbyQAUY1zSyY0ArbVqMVHPQ3GDLcGRqvbwl5bcq3RAgQgZQ0RDyy1JomhVvSixxzkFACGEFEKoFRTIkEGL9dogQMTAVGGw2XBj3jA5tk6eloLWabZelzztRZMsUr26AxuKgEVCASKiyBwIyUBFfJbX7DtZjxj5Oa3Ha1U9LfcQD6beWD3z/7CA/QwtEO7UpQeZtZV8V5B8yM1Jr78RNLMcUAEIQkIIhmpgShZsqqnjXoANaERAzNYLb81MTRUU0UopIbikViNDavQZggV/aS27wsw+PBMAeoTPfEyLUwi2dKX730BGaBYDP51PHz58uFwuKcaBOXk+nZjhlJYUSbEyhHVdE4cU4hKX4LB7pimyCtxv2/XlKwHEEM6nZQ1sossSz2vaVJbACxOhEVhkionNDCWDolrx+oBIzICgFQ2eTieRugVeYorEhBiJmSkEDkiBCdRUjEyhyG1ZMnIROycV1rLXW8lblRhWIFSwveTT5fzhm497cWasXj4XY+wd0bgsC4RUSsm5vLy8ID2t66q9O0JE2MxnXtVcLPCwzGbNNNPqLLhmykFAgIOcVJ0bjhZuM1M1EdEOq2xmDdoXcFjPQ9W5jgAABQOfM8neq91c03U51+LXAlWfAO2DVWWcv38YiVnVb3I2zpvEfkPhfrj8n9kEH0pbH7TMG0k+SxLr3uOb1Xvzlenz0xnGVwhNAY0ATME8s+q+xX3PP/7xp9/88Luffv4iCjEmxHq93ikkA/LEIBKHwNAiKqwKZmrmY+WNiFJamXnfC9G2risix7iMlqqSa1FRBFULJiKhFLndNvUBMMCqyszm9us0mpiIQuRlWSKHlFo9Dgc0Nydy0VI1mZkVqUWqAxepqpOx17gCgIeKtErNRfvCEKKI5FwJ7sFn86isp1RrzbXc7/dSCqA9LXB5fvqY95CYGFoAGKairAax6GG1Zpzi5Nu/Ifg3rDG2GL2sA+Ah6Dbi+J2JDtNqEImfRKRZtu+o6A05vaWVX0ohDmU2P8X46puTdKZTH0ClvdEGEYlshHv84yOXaD0+ON/eMCqwp+iLine2+8c8kEGPtWPk0/D8djrMY7uEAWDrI2AAZEKf39u8dysqCFSr3vYNETmycRA1RXA9gshqx9CISEyAASlxYI5mYobEiyj6X4gTxRBDjIGYsSojWDVlREJkAAasKmeOAj4Y3ml2jLZjA0DzRWm84PVQzhrMDEY551K3UvXL14yICs4CWqTectlzvV6vilR9oDQcAdAIoWFAGBBDFIkSU6AQyNvJ+iwNPYUgy/LxQwK1su8VfFqFgJDh2OKG0z7a3gZ+ZinlD3/4wx//+Mf/yeshf+HQ/0zJ6J+aD/Eu09KquPvZdDh4PnbCzACcxv6kT9hKSdtcPRxo2fNnmHlCbZ+cqFIREXsC3w0MbOYWGAL1ROWQybNVZgf7/ELsu13slyLscyXXG4Uy7FLr9rCqjtE7b2OR9HDdoanf2KvYvdBgZmDkY4UAiLurg4ieygMjnYoc3HXB6QL+JKNxv2vGhpJifaA8ERlIlQwAxEBGagKtmLUBDbfqsxYnMx/9DgAI5BtGJIio1kb/DZMCJrqbNbRNCToTNQCN2mrgPccCx+xEgKPmCoYcZ/BebjPT1oWvRg/ZG0QEIAdBoXCUiIwVgKP48Fh9niZ9j6jbuPmHp5go7L1InTESQghEaVmWMQ4Y3IFSQ/LvEpEFXlS1FCllJ0AObToNElX1FuciIoE4JY5hCSFcr9d9381s3+9mxkgi4lBjr7fb/X5/fX31GxYRtXq5XNwpejqd12X55sPH7777puz773/86evX123b3JtN7IOMl/1+BQACBVSRltQO0VuyWdGqVu8JQlOUqlIYvembqwAwC6AaEgU6cgOgLVtGChAdfh3M+1MQodaK2uJiWRW9BYUB4Oi1bYpncv4PL7HvZkSOMUKX7NZlBwESASP4XCerRVXBzOsNCIwwMI52dkAwL1xhZt9TZ6tAzMxgwtOjESAQ1ke7ZNySqhICA4KZmBqAwQMFvjE9qeOkvznbeFJ/8dbvffdhA7AGptSE48wmHTDAAHxiOdoolRpQWogO5EAGqCaqWAUxNBA8Pw80tId2WqbaowMA4N2VvnoeM8Le503QZtulFEK4fHy+rGsKofdy7HcUQINa2SqjQQxIYKaVichHGZsSgFbZb/fXr5/ZHOLi8t3HD4gIoud1OS1J9i0GSoEjU2QKhIzo/jZ7H5BDpqogKAGS6ZLSEiIjLSEyEagxoLJRQA5ImaAIBrQYauJvns5Xga0qMyVEFNOyq2qt/iyw73fVp8vllPOzi4IBmeBMGohTiEKRV/aEyeXp5NvpSdJaK5YCAAHZ8z8BqXbojjdbP+y/maj6J70X0XX5wzwVGw5hBy30FHj/Zvu+9pZx39pOwH5R9q+Qg24eGhjGPTze0ltOmSMms+BlYmAKEN6cAcCjyhScGMmzj8jICFil2iN3vP3uI1vNJUBDVQ0z+o0ZYWbuslap1PHTzUyqeqlBQ5hHQLNSyrbvXz7nf/jH3/zzb3+37QJEBgEpcFxiWolVbd+2DQyY2wSpXCoSp5SSVw5zAIC0rDHGl5eX233zmWY5lyqqBnlroNMGWmsVxBA8Cry3xnhQUwVwbI/smUNnN2ZeYvKJLH33axXHwd9KyWay12JmpZYsVUyjYQsCV1CwGJi8qkhFwTiGAc/AHZtBwVTg9fWVGTlgzvV23RAxMiHibUv3+7ZtWwghpRAX8qI9h/xs9HzglRgT+07ACNi1QtAmwtr+dllq1KsP1PQx9v8W+2LqBzmOh6jKW7lNffo5TBG6wZ2DgRqxWXMOmyliYFNLjuoDoNjggtEvas3YKHMLtz8Y2ASA1/nujd0yHs3sMVTUu+LRY74Tg/zCOkyS57jPDtszOJkcK4XAaUyq3e/3r1+/VgUKDBhKLU1Jjeq7FmcxglaGxkhoINVMzbtdDJhC5LByYCA0QFHMga0NOtbAHN2lNPu6FzKILCEEAwFRxMLMGL1Gw4uGBJEJqdasqmCkIrXUqlJqqVVut/stoxlmqdu+71X2kvdct1y+vHwFCmZW1QAgxdWtvoUNDZCMAQfKACCpQoiOWY8IiqAx0LrEX31zgiL3+12qoaIZVBBURPamoSbVxUfdI8YQrW/Q7Xa7Xq99d9z3+5Nuz3/p8Qv5wE7w2JtTQGy8OSx2bE6j3wGSwSjlMxi5QbRxj9Z+HIHvY1bnIC9wzuqoLd7n
0hnKC6e78eMvRqDcm4O6CxC8hxAOGm58d3SBNlPTj9me6fdi1u20+c3xYngTfvJWIcUPoavBgx59GMyIPRwTalVhby4ngF9APAMjM7Fe+dnc2alQe1iKfkbDZvC5QeZRP59WNJp/+tRRGr4iURt06yjVblW2th9FQ/O8nO8KNcDEwIA+vwWmultvQEkpEWgZEThTL2kRqTaFpfuTaq1l7EdL3KGaCmJAAsRh1zaA6TerP5ZV+5jOY9cbOmVbE6JGUy6vvZbGz+N35c8C0z4d0hagykE9qlrNPOaNwD736enp6Xw+55zv9zsRhd4zA4Z9Gh6oqkrdtm3bbmiQUoopeOnCH3/8FCI59OKaFkSsNd9uNwYsVszMBErJe2+b+fLli5lt21ZE3LgppRCAVVliShxiCOf19PHpGRV/+O3vX15ugfhyurTARgi11teXzcEYYyAEqrWSSgAOzGBkSNWMTQkIiIBQAf2vaVnFcCtWFFW1Vg0OMwMA5noLiIiVzYxi0ColC0AOIZjg/b4vEZhIEawKGgRGDsF1qI7UXCcwZmboFc9TWgAATmmBXlg1IiNEFIglBATwmg3r6ya1AiIGQ8RAFAgTh1aX5bhivr8eNXTjxphAGTvXgCHi0NfweKhqHzippioqQB6rlDd0NW4VAPCxJtOfeiBtzOcfK2OPWnzggvf1aXc73mxfR0NEBmyxDEUPmjoTM0EI7nirqg50X3ocRgsAZA2dL3EQQCMUEe/49ZHK7hMCGqCSkrEhgKFGZgyUUoghMHNAEq3bti0QFMnvJFBcEq0xghmZzwIVRATTkvfb9eX165dfff+tlvrN0+X7bz46oNvldFpT2pmXFM/rsq7rElMMBCJay5LYC8V79Q0tgZcUXvc7IzARAzKRT8UENUN111m0ilY2ILYlhfN5lb0SoXeJBNEQQlbzCW+q6qVxbiUg2vm8mmHkEDkEYi8NaoGSELWKHu1kwMyGVLPWo3C6h0LsSCDPmmbIc+xgvEMpHHveaGDyjmyyjx8TLON4sD5nd9HeqMPRKYBiuG2bVabQyoYR0UGqWwbo3SWatdA7UhGRQ0AwKw/kPQ5PT/VW5wf/7c3P+cV7VvWE7cibjfPDOyaFYWdXEe9Pa5Y9ymQTI6IBVJE9123LX19vv//xp08/v5RSAXkvVURjjCmtxLZn8QmBMQIilCLFaowxpeTzbJ2DiOh8Pl+vV39YnzzsBR1FxVUwEdUizKxixiBVmQBwDEhvZZ+EwU1kU7W+46XsjOYTic3MvQ7pg+bBp154n5U3k5kBIKEhUYjRHT8OYVnX6/VqraqWQ6twJCbe9t2MHFP3dgvEYOsaI5fswDMl54wMwRghePgOptJfYwJAM6UQ3RsEOyCLJuE2DI7e1WZG6M2cD47QTNVv6N93fWx5Yx6iBrj1tv+wE/ADPwK4JTpTZucfP9soprCjhhP+1DHHQN0E8kzReN/MoA3P6E/xSMP0OEp6fIt96Lm77q3ErKcK53vqlnpDGe3TAIYIYGZgBiKw1oTpcK24nlRyzvl23USEkBQt1+pIa+gJlloZCVTRDBGgzyEwAxEDjyVjoBCRF0WqYqWI1qJacbnUWiXvABADLTGlQES4xESggdFroa3PG0wrLMuiWvf7nYjO62pm+9YwPFV1y9mt5Fxk27Zdkojc9+31tl3vty3n6/2+5aIAFKKBRwRoXdGQFiQrW5NLBMwYsIUzBNT7sEULiYJZIFhT+BYu++vta4wlilbTauLzpYwRgYgBvHmnIEAIrKpiGqzNnHT21FpbIgRhyvz95zoA/4uPZp97Q6C1VCEAqIGOyxkIPkSzZfL6Grq+mQEq2EGcNhVOg5u2Hk2GI+TB6lVKTcg/JAPnu+x0ePwJWwdveycEEAVVm4pTcCoqecMj702mWVzMzgtOQwuxZ+aHY1Xq/c13D8fkzf2bmVnQNiUzOKiYoxjte1EFXlDEVCTGBcmIuM0+7g/vIt5MRk6DmV+ur+uanFBSSvf7Tn24U4zxdFpEJEb2uiGzBqkHAA7lpAiOz9nMC2vzbRygzDfMgzv+2A7nKlqko0V5gbcjXEODqaCi4kWt8XxCRE+81JoBgncIMLNbPyGSmW77jZnXdbWOl8OMzImZzcQzlm1jsNtAJGbm+snMdhGgVukmItAzP+4fpuCgoHutx8YMmvAMZoMn7l7i+AxicH/AzKrI6HBwLQ4A1+vddWophde1Vt22m1YJIaQlbpveX68hBLVKwGqtW8NjKZLLsjwvy+Lwm0RkVeqejTlFJqSimreiVkMITOTVuWgQPOdplghjjJfLZb/dgWxNy7cfPqqUP/z0x7zfQa0UHz0SHPAmo2WTQEAmATGlAKkpGyLkuKhANomB1UC01lpNyodTyjnLfoeQQPF227aqHJKDCtCUoFZVAUOAUiq1eQMmIsBtalSpFQG8VkS1KgADp5RAZN8zIsYYXULEGHXf1zVtG5dSkEi07Pl+Op3IwG0aAA2BPB6SUkKJOeda9sREa6QYRERVUoohJgHvLKUYo4JZbVh8nWgrEZ3X07bdBwMTUcDDjA6BTycqSnuFLBla5q3LKC+qRAQEL8njI7SBA41tSAqcX/ezHFNWe0JjSBA/D3dQRO0dTQYkImrHNHNVXdf1fr+7JMs5p7XJO/LZboDgc2IYETEg5Zw5tGFR5BnjEEqpWoUMGKmULOxQTtWs+ULrunIIWurwCUMI5t3wMRJANWHCZUml7ufTsixRVYvlxv6BlyUdLddqqhqJVVVN1rjWvJuoqRLa89NTRDx9/PDh+enbbz5ev77cbuVyOtVSYqCPT5cYQslbRAjLBYILCgWtQMhIDqLPzEuIkRBET0s6nxYpeY3pfFpAa4hRwSoIR2IKIIpoy2m51RoEl0ShAFSJiU90sm0v+So1xxgpct7v8HR+fnoqOe/7vizL7fb661//+nK5/Pjjj23OBCWzmFLa9+3Tjz+tp/TtN9+/3q4fvvkWCa/3vG25eeOIpRQIi05oGaPMyeV/rdWb0OQYAkQiYlKSDfhQ88p2hDZqzwMfCi1W6OrNFxygA8wQqbJ2dajeYto1KBF7F9m6rl+7g1rAGHHfdwAwwxgjYTAr3euQPl2JSykhrG5amZnLBzUxwshLv8TDHCCaYoI8NdCs6yr9sEc/eXxx/tVhxEc0c7bKdTIdmmdDlKVSD0h1vY6ImFIyL3ngqKWUKoh4z/vvfveH3/3+j8whxOX1vsW0nuKy50qBt+urqj4/f8w5v7y8+M1bUanVVBEgMF/O5/v9fr/dCDEwq6rUGkPQlErO6hO9AUZbYMCmsI48kpnrVkeakWq1ViQLIQQkLbWqhUj7vu/7HdQ8A0dEMdC+77is+75Xq5QY1WotojWlRIHF9LZvQeq6riklU825rOs6BJSagQiAKqmnZeKyhBDu9zu36fYkIj/9+DNh+Ov/27/+5rtvEU1y5iWUXACAAnEIYFarmCkGdpPOH5YDoY8dUwVmUB0ZOQ9RmI0GpMnfQ2ztfTTSfYdNNgw06J+CYQDo8YF2QsTR7DRSaNAAMJCYx234RkD
3RVW158TaXcUYwQEY+vn9W7U2LEdmJuZuelqfA3RUnXivEHOYTdXZaWwU0qEHXUTE4BXs9XgqRJhQELFVu5D7gdKbg47FJAIiLdW8eM2fqCNG8p4B4NNPn30S9/1+p7QO78vUQIUAg5cibzsu6uBhrmRVVYsSCjA+nS4coqjd9r2WrCWrFFtRRKTkWiuaeL1VQDitKyOsKawprSlxgycqcZfAN7f2GeVrreaVXN4cW6uIbLne7/d9L1UkW9227Xq/51pzLbdt23LJtYa0DACYqnbbtlxr3LZv1rUD9qLXxIrpfS8ExphqLUxwWRcRsaxrWi4Yv/twkfpt+PL65Xr/cttZTdFESogLEalVIjqfTiKy71tK67JEwoCIl8vF8cloLhntCr/T8Z8OM/zS0cMqUxgODI7aaseSOZLhhAjouGUNMaH5bH2ECwC0ur93ucejkQE8BIMg6lbTQdsKVit0PwIRzXM74kMXOsf13E++NwfMH8RldQgBarV6zDFqmS0i7WMX0F0D9xZERhB2zjy5Bz76eGdGG8wFXXGIHM0R8zFO5WcYGtx/dYzKhlAkYn1ZfY5fQ2hAxFqLAyiNnYDumzrwyZz0m13blIKqmokqD9+m4aHnXEpxDMPJyhREBCYmdocQCaB67TIHFaKiBiZaoYoI9udtbjESMzZl45hRRDgkTTsMQL2bbu7F7CM7HoY29tGuPSzRZavNdcZ2lNKNVKx2BPkmmPqFEJB7PtfM5sthD6f5vo3bhSmHo/oAtuvbB2AM6KXn4t2xAO4oeqJARGMMTFxyBRXwEl8R1cpIMbE7IWTw/N33KSVm9C6OolrKnvedmSs7JVQ0Y0AQLbKTgidrne5rrQWACWTfPlzO33777ffffstI99er1nxOETqlRsYYGREXTiWgU4XPahu0bmanFMVUNCiSmomEXLZaFaxSwFxl3257MTBc0kocVbJV8VQAddx8MwPEwNzQzLT1u4rIuqxsPeNaRXwOILQpjjM7+a8xBP9iKcXjZJ4AbyaCF8bNiIsgI61FjATBevu4gQK4OGtfqVbtMXzlDgl0Gh2tdATYCnbo6JAehIGI1NLMCGBko6URxrXclHwjIgcbvn//DZW+OdvxyMjSMNq8sJsHI4yvs2f4kY4KQA/gWavgsIZDg+7KtqczQAOtrd/DmXqI1yFDVJVUoWe0VBoZaKkIRsSJQwgkWhxsMBBWxzTMGaZ5aKCmqCKgTEBkqlal6Fb2bKAB4ZSWJfDTuj4/P3/74SMUkVJPa/JKo4DEjm0DGEIL76spIxi0qlGukALFwCG2DwPTElORnRAThzUtGIyYyeH11LlazGBZ4m7GjIQAJqhoKiY1eDmDSdn2Ldz2p6d1Xc/ndSgMA0kpffjwYds2Z2dmNkPmwEx1l33fmZmBjbHHwlw5EeJjYdlkOPIEbz2TSggBAAkYaiOVIc1cWuaca9FSCqmpiWDzphC8kLT1ELYCmNb283AVZg6BGOvgrPakSsyk2OBh3Cdx4p3lqp+k1grG3apWAAOtigB4AAiPb5mZthHxOBSC/3XfR8XK4cr2R26fn2N8A9364NxuB7znMoADRrj/oX1x2zaOycw7mYiNjRARX15vuVYg5BSTYkgLAxIvani5PBndrq/3+76LGRntWwlLmP3bociGXHpjqfsHqMlXRqRSpJQ7M5t0eRJQtHnIoOAbK1oU0ae11YKllFpziJQsAqibR6CeLSxWvcUJEC2E6MK53aFU27dcWwD0+bSamfZLQy9PoNbGQkU0eBmnopnlKqvBlstPP/0cQnj+8MSMKkYUAJS8Cg7RvXX2cfBdGKoIddVAUyXk3NYxKOzBqOhn6D/hwLxiGhIQJj5ySf5WMs8y+fFP6vcwiW6/BI8OqgeNAKrVhyF2KToykwCAgwakz+gSKYgz6MMDSYwn9bgJdgcMes/RICcVA2jw2Z7LUoPZXvKjWq9SCccc1PkYC9Rf+w8wxX3L9/tdRBHZ+VWncWWgiqYCQICCZiyB8eTjrwzEW86Jifi+l/vectdoYipWyz3fSikmgmi11ppLDHQ+r69bRrCAFANFpjWFJSZmej6d3Z5nIgYjBhN14aRqRWotet322/1eigDA61Z2R9gtecv7nquaukyGJosgNa+DmVG1qoKZT/ZSkRIIiSAG2ktJgZnZsR44BjBYSM4RL0vMp1Rr3WqpxYopqmfgTYuaVCMisNjn3DIAIl4ul+fnZzDTmimc4O1h4KV5/1UHvmkv9PZv692D2g1vT/3BYx5SJ+TRo9sQ4K03qKpTeEVhBG5mcupZryGuxW3vLgDMzHzYu5l184M74PyIv4wrWqe6oUqICLnL9qmTkNpsoSMZOBRHW9l3ecLx/jjaUxxK6njfeg/8OMPQqoE5dgUw9Ba5HdDdZTKzWjSQeOWnqlovXKQOKzfCEs2iwnYfNKVEEdFcG6BxIKqtdrKLBhAxz0oBOHxs70hBR9ft5wFFRCZwZJrxkNZTIiOxZmZaxSaH01tLh5CiPhXDvcHhzfYbs8m/dqHREBdgqgJyGdGIxiefvOvaPGJpvX7GobRVD2Fqk1IZEYJxV9SGpfaNB+zZ8PZ5Fd22DADecubfEim+pCmtBHa/37UWZgYTEzVTjpw4rSkFYgBITKiSc805e/ZPtJQ9Q4yOgsKAMfrURNFaIwIjRCbyWlg2UEvEMYbny+nj5bww3q/X6+sXKSWE8LSgiAcaICavuQIRQnUqajFI6PB9T+siZobkk0xqrbmAFNr2G3EqVT+9bmQ1YjJCQxWzoiIiiYmIOLgylxgTBUoczKxWR6NVVd33PYSwttCA93QBU5RHfjZz5C7zaIJIUa1eJ46jeB0N0Ri94gQHUTUj0gMQSKpVfyHWq6ptmuycARjCaHD4iLqY9yI6hXR7teHRe+JxkEqXDzOzAMAMlTWe9M2h+PAV/3R7Ihg/XDaqtcYNMq/MeEQA14bYBADpcUmbN/jGIXRaMGuCujGhtISJY+55jWVWjSGQy3BRJSVrBec+BoYAWv4KzT0NM/lw+fDU6wXu9+u+370MRkRqNUYDMAFQIzNGBDUrpdy3m6qeFmbG0+ny8Xz5+PHjh6fnL19/lprPp2ctNefslaiJSRECHUaMmSBy02dkBL2/2gxMIoUQQsUMANymVKGZCSA1t8MAwMiWNUWREDkyMhiYsmokhGKAYKBFt9vVttfL/4+zP+uRJGm2BLEji6q5x5JZ9S33Ts/09EwPQAwJkk2ADzMA//8LfwAfSAIEZu27fFtlZUS4maqI8EFU1c0j695ZDIWoSA9zW0VlPXKkihaW16drxOAiu16vP//88//wP/wPZtaRAcCgZt33/e3t7fX1i5mBRXDPUxLcHTEdybMAnyVzSc5SaMQJEB79ojzCzFko6EMGmAnB/QcLh2nPpugNrZsScRwHk4iWQJiFDeZkERGbre4+bWc/FVKWWM5LfcgaRng6pn5yaH5cI0uw1wGz92Op3/OKO3/ij8y9n7TBeUmur59d7fPzyafRrLMWiyAzdzixddv39pe//e39/b1ZgKQUYhE4s8b+vk
stF3+6fRySYyaCjuO41soTpRInM7caRnK720oPDoCVB40c9aNlqZYi/f5gwM2jW3AgsywAGEQUCE+lGZ2JlKWweER2Iwlh32/oHUSLw9MAAQAASURBVJHBo3gE3MzaSlUv5ZCX2swo4xYDgBjpjMAgzkFrDTJhOJ32W2sX+/7re4KGIuLl6+tWsuVVAJohNxMBJPjBLqyoWETATDGnk62OQbrHKrmC4zGkWZMScucHlX0WufmPs8OzhIQej/mjlA6OvrgzGH0SpOWznQPa9Pd8dmX7pDTsvQOWwyHOXtMna/VJS5wvKb/oh62Myeo//PTdB2n/zTs8I1TP/wHR/Ljd3t8/MhDNa3Z3IfYkffAgRFpsFrhZEd1KpYQ/uAPczUopt6PlEE4AQvB+9HZ89972A0AiFD4+3rdSPKgWEYqGfhwQxt70Uq2UEt1L5uYZzFDiJGMfNLyOWzve3j7eP/ZuzszvB9y9u6X9cu+iUi412bWX1l0Kp8RgXS1FA95ag6NuCqC7VSlaSkQEhUrtvVfEc2V72QBv4Tdr3Q/vCAoKgw/CHoqAiAjc3ZqXbYuIl5eXn3/+GWuw0FgyuS0W0P+tcwgfA7nxyWMO+jfAqJ/Cwjv7aOBMtHeHfd6XQMw8xSymg5xzkEmEr1yYz/WL4Xjk4SPG7MGxLTn/tL4e8yA/mpJPVu8c+8UpLkgAzgq7lpY+W97z9uO6O1/DOtRavw/1/ZF3R8LlIcwEMetJdh9BZlZKSWTp+Qb81DGyriwvN/OyKbJahA6a1UjzmDz9zITpMw0MuvcwsWEbicjDGBJhHh2GCANU1lCpfFvmDnInmDsPj9MRK6CiGWdnjd7MPHoihnEHygeNATJp7C1C5lvBfI4Pam7lzzADuezPGUWqaRiygGnhrbV934/jsDb81vme7m9uxkWfp74Ky1LNANbFK3HSx4AcPGWXKIJIigrFYNUBEcODwMrEXIqysohzTty6vX/E5H0lQJU1Rj+NsCgxC4roQIix1Mn8BoYwHOrEteof/vAHIoId778e+76T9cKxKUHUnQEXka2ICHkkDt7hAYSIT1xKiQiecyfyuZvTvvdevJUraTEnVaVfb98+7NZvBjWPdBc6D2JdIsq2PyLyDF9LYebWdzP72O0S26ZZmeQZEA723fQs+9QF7g4NkLNAlESJOMJ7a/sKCAHnCHLLT0b0JlN3C4lJa40ERMREDCYlEg4eWmDVZGKVvFLpz4guIpAIZGY7ObIMWi12RBRxYjYnGhO6TgbbF5fM4tL4F7yKT1oJjy7F0i8R4WMoHJLgYiz8ATqdHi25jNDyX9KMQ22parhbD17pUCCHOka3JAAcj3Emp9KhyUQdeQTFaEeeqZ1u3Y9+udTXl+fBtRuWtLrbth2tc4cXxeDdvdNAJ2nwvu/eDqFLKcJC1+u1KCPsuO0ACsv7+3s7jueffqrCRPd20Jgt5pnYWRmrCBv5od7rRS6lmh6J9nEzHNbYVSQ8aI6TJnYgVPlStAirO3evAS2ltY+s5XXiMG/7rR1bhoKt2aYFwNPz5Y9//OP//A//8TgOsGZppVattdxutyA8PT231kbZf1IfD51z7xh4EJVVOFrKf2ladlEuS1RoYIxpPQFmFemqZG5nNAdTTmQmotFrSnw+78xTZMoyPDvrUsEu33XoT2GRoirJev14C9OgnhRthh9EIHuoRq5tGZQlwzNTc2IdmA8tryGLtOevLw0fZ0zBD/DU89L4cb3kplqJyM2SjP6w/v3t7Z/++Z//9u3b28dH7x5gJybz/Tjc8f3jffMLwIm3TBf//f19U2VimHvrwaLEl1I5YK1HhB3tOA4RqbUKkgKamAuPQpz03iPIHeRETgineyERgLPzTOUQEfHgnCMRBVCKCOM4LBM/8FCAmUiUmcOph7t3242IWGvObw30Mc6RR8lgPv6HyI1mtMPMETRxenIcHdiP42BmqUVquV6v5tnWFZn7Hh2AJ08JQKbjltqJTNecArmYAczSrmf5GSsFuA9dIAq31c+3pGhcNh7U7/mmIkaxYh0es2y4dHvMwsWQf/8sWvcgaoL/17uLiJzldBZRIB/2A4ve9GTu6n39/unixxrnwe+9DpK/LYc4d5A5EMDpB5Phyfo3g8AZhA+PPMiaW+vJp+Ru7pbL3W3QjxHARApSlq3odStFKKw7j8GzAFmPo/XW3T0A72HH/tFu+w5f+BQzA8gijm77vhflqiwEISTDs7Lg+WXrfcWEns0cg//MWve99aPZ0bqZQTTiJF1EolxquW4bhBOalGPGdHJ9cffupsrbViLihm4RzACThZEwabFjB0hEeuuFO1UB1SDs5rdj/2htN89SCBLlmHUXyypZDxqIRFXNMUVcSrgTrYBnBjMI+g2SmH9tO49yARIWkISiI+0+yEVhpyDQTz9/41AL33eSMcKcNXUuq40dsij5GDjlykn7O8NFgINWJT9/ptwukZ5/ZebEQa09h8B4PKyCWV6S2UPop4bhpXnwuNDOduFHk3H+ZP1z/XxQDoDu+161zN4JhN8BNnXbRORj/wCQRHxmIUznw52PlTKNoQdpFJEWt3hYTrnI9ir3wTGTxDMqOYOoBg3O3FRaADGxCK/WlIgw770rzezg2j6pIWbOKbAjKjspkqwKevQ1eANAcuecj2lmk/VuiBQ9bulUj9eTPsEsxZ4vBhPVpqreW++970dEiEBloHDT51/qeyInH+qHEUEqS47PKjixjswsKtk6FTPNkASY+76HuTIgEpYOpW6lKjPMmh3JlVIqeQQTVSYiEhZiCIM8FCQMDsp0Wslm1kwckROzqqa+3rbtj7//3X589P1w92vhQlVApZTCJZtOVUULU6IAzZLSGoBQgh/GOCxvTpRsG0JEHnwUb43cS3PvzlxeOvi2//q274EeNOdoMxtMgpjzCWT+x6iUqkULY8/M0GFmza3kJIcIc7ceOfV4Jd5iLdStLInCyhA/csn6ZEBJ3DIrszERBQezgCn5XSDpd96rwYATILN+slwNVRGRYI6YAAbhZJoZLbMnlbGuKpVpul+JvFQiO4n3J9fyUXV83j6b4R/+Ok49jsqRiLHp6S53eezMsYia8vujV2B+cFd2J68i0rZhYbfDzMrk9lhLW2YfqYjQ4KwiVS2iBLN2dDu+fH2uVSk8zI/jyE6zlBmMeQOiGmMMAezj48NbT31NAFGIctWy1eruHx8f7v58uRLRfrtlh9IZepe3lDJARBkNmTUPSR58AL0fG122S2mHpg0+juOiwcqU4MkIBBI92qIL86XohZWaxcdNSC/b5dAKIJgojAgUQQEC3W63/Tiu12d3r7X+4e/++PPPP//1r3/1qO49W8FLUQCOeH9/fyKpsoloKaVIVdIg1qAWnwz2kLckWTm7epg2j3AfbpTojJyBsSYV0eBPim4t9CzPaVaJiHrvEc4PFi6IaLtcKCTr8czMKu5+HEdm8dxHxznNPvsj7qyJuPud5J61wJS6IAJn0mJARjNJcTfoRBxBEYu/Nv9K66WfAzxMFFDaxwTG39fCCe0cj48XJ+E/r
8H7XzNJROBh35goDPH+/v7P//zn//F//J9vt9tx9KO3IGGp5P12u1mQmb29v2fKK3u5ieh6vRaOiHDrvR1FpZSy1VJUUsI7wa2HmzARoqgc3Vmyo55hDgMHqhQfWHciBIVQOIMYLDRSxjq51VIpUADkCIZFWI8IFmHgRTQLjU5wchUekwaJszA+1MBUDcNuzh4w5pVKChBl/JV2qncnMqatdyPqEfL2/eOf/vFPtdbX19d0IBaEMqMN+FDRa2DZeEEIdwvzjMoSqhARbnafInGS6mHKaQWCdxVhbrze+6JOSbG/H+FBEpY/sATmvkbivAGAzEw3zQ7G+/YYwuV27DvdPc67SJ/MgaeXsjTwj9v5ROtS8ggDPTFvkO7Zt7smIZqDCind8FOFf7nds8lw+OI5FCWCqRBRQvIj4N1CuLBCYH6ih4WLUGH5+lqv20YUbh4E5gLAPPZ29I42ONvdjr23w617NGEGwtpBRE+XjZlba95bODMUTGN0SXhIP/bLeA7Ozsh1m+6l+92FJiIDRfOkwmrWI1xVVGTbtm0rTy/XWuvlcnl+utRaKbmmzPqtH8fNEbUWR4Bqa80SkOLRzbrbSCIHedCFugoTq0d8tPJ9v1w+jo+jhatZdxA5sQgTuXfvxlqyr3sVe+AOmW/55CPMmO1/R4WQI2K00qy1No4Z6+dp/7n9yNt7F78RVrnZLHsxATSdN2amddjHgZ/phaTfdBf63kcX7nl1eSzSvk/e0vDBMBbAWqR0EvVP0IBlINZPngx/51vL7ZS4eVj0LP5p5/MOnw5FWSGcgYy4jfFF6XTm2IkV2+TVpMieT7kuPUEXKSs840aefDPZPT9aBOeL9MiGWi66lbKVItb3iAAsq7XpzAEwa8wacyTlXXEc4xoW+6LIOON6UkS0oPMgJ87OdaEH3eqr7LaqDWb2G/JMnuz3OL1LZqZ7V3lEhCHW0zhrN0zDz8zbtgWN3DmPLp3p0I83N4Qplvti8+s0akTjdfYucx53Es6k5spPIqKZwUPLRRg9mhC2sl23MriVuxFCmOy4pZQPFypAFMoSFAlA5wALJSqViMT3stVs/yu1RkQwtm1TOIniSSVhq0cDoKov9SnxaaJpcM1Mej/MIzFyqso8icjNS70GE3h0XbrbUah36f342I+9I1iet7JVrjs1RxBI0pfLt2BEtRSNiPA+qnRKWX4kou+/vn+kGHvUUui+Zh8Ksz63WktriQxys2xJisTESXba20Alg4IYOUDZRRaFwIqLhjTx3exlK3z5sUI4o1z4eTHntd2N6xTIyOiXGSeS5btiipVK+C0XMx6M/b9YMPy8JqbCchcwHMnRaw1hJr33bD8eSyPbMETOU17WkXOtjHucd3qHxMg905GHynA5Z6Ks44hIJJtrkOUg0Kmjcv7Y8/VJRDIUvL1/JG1VzIkL+Zib9W5HtBa92dHYqJbCjFL0er1ul5qTOZn5dnuvosnuGxGXyyVrce7Oc6aZT55Nlow8U1tYGtaETWAylJi1o93e37+/Pj1rpNMQ8KCZN6GAAkWkMqkH7Z0omMplS5KGaK3HEf2pUYRWaY3Xw3X3y/X569evv/zyi3Bp3VPP32576o/v37+DBFwTa4STsV/isXRa7pAlhR8/FxFywuPUWaY2E5+Dmqt357BuPeTcfzuu+GRt7uWLDN56N4ogOMfIuGVwktrfTihHw/32z9Ke9v9RsFeik3INDV95lugH1aW52RnGGR6WPSHrQ5rZ34TencnEpxg/tJSsr/Q5QnOpoLnSPPxhSUcEksxzPnC72S/fvv/TP/3TP/zDP+ytZ2tWXlA3y7KwFN1vh9mhWk3Muy1Dnxc27fXglLper8ucTZMqT09Prb/lt9zCPLFCrIze+gg9mENOwa3HgKoPQDlgnSAxGjcm3pdYVd27hpqZuVk3QwhQRcBilkUDeCCIlCn9hKz7EZHDOUZ6zonSafaJtkiHRPWJSPZbsx71WmM/jr/2p6en3//+96+vz0REGCmIIdup2JcpPymcZHDJ9BtONmPo/N/CQTBGCmyFfPeXnMc/WaO7Q7kWFxEBPh3ET+tumQ+sNbt2oJm6O9UrMqH8SWjX75h6SZV75/PB5wXGOi9OVmZ9njRddKLNo8lafP/62J/oNMAwRqFi+I2ZEP/0MNd8gbG5j8pMBAIZDSY94f06AQ5YBNwtnD24UNXydKmi5Ga97xxiDA8yoHXcuh3Neu+97d4OphCRftu5FIDNOjGLCsJ7a5etKIPCMyYSJlWuqrdjR3YCZ8qWIZO8Jt3P8Kw5kRssbPKIQgtrKaXKtm1S5evX122rL9enp+dL1SyWtHC3PfZ9771D2MJF+Ha7ZRwbhL03OYSJhLkjIKzenFGDt8pP1+31uf3y/vF+aG8Bh5szazqZFBzhw70WYaXjON7e3kCf1VGMJpr/ndvQA6d/DsU7osEMAX67KjjxooOP9BMredoP99PwhsUyah5Yy+GO2JzfenCQpinC3D/P/LAA6fGxtNYKL94QQvJNuKdYzsLY2OQ0vWxVCNeiiKkQfjwLfvDfFixlfTE3/oHsKgYmS6qcZ6z7/Uw+BmjUVRkrpezt9ulAmJCbsKmLJ6A0b2BR36Q6dvccwI3BB6Dna3Xv66kjwn2MWGqtqY7SGcBVB89nxDF058lwno42yKyS4aL3rjboT+g0kismHvdczZj2Yx6ah/SsZzdckxmIBsgsWs61m/Mi162VUsJH+fQczS/Rj4hTjiBsKMR70v1+S7MqzcxMjGHy+HTZti4gaPS4CxeWAWQtpWwimxZlhgeROgUTM+iYlErEI6wSZmVhxqZFVZMnetMcU4OrPJWt5iOVUoI8vYeWWV6QFo4In/yZL2VDArtgQHj03uE9iAvnDJ/MF4+bpa1ckClw5mCK3gprl2gMCpAEjF6frl9f+63h148DQIQtYzfJ/VORJfwa3Z0igPFyW2vejTzw9LTp4M/wxWNxwoDxhDIOp+W08JQlJFbif73f8ZaZRnGPxrRrS2hluIdbuMEcQ++cK2nL/VpaZoEth1n1H6zjfQmAOL2Oh0VxP9pp5/WVH4/zm/88e/+PR4gIeASfHJSzb+GTAot/oM2Iew/hWdg9R4p/eiY6F6+ZqQgFmDhBmee7y4Wgkf20th97RFyv29evr0Kc4872/cPda60RoXMqWnNja37cnJm8RDcNyfGFl61cr9e6lbLV/KI3FyFmTqaWpJp0J/ee1vquLtSDRiHO3S0SQt+A+6tvrfX9uGzbvu/Un3OOD/kdBEURQtQjNGgTuahcmQ9H6SavV5+529vtdnx8WOuXy+V6vfapf263W922l5eXiCjMrKWwtJaouQ3A29sbS5FyFd3uYjx/OTtqSww+1dLHehnjUmj4Ovzb5S+fraQicvyY6byflJaHvOLP3jtDFon+8F+ZzLpZ4AeytbVkP5nSvNzZSD+MmrvL4z54TM8ta82zmP8po7S+dY72McspKyBcqmY9k0/bOiPxg/O7LBENzi0Pt2/fvv3jP/7jn/70p+/vb60xM0sp5PCIfT/e91vRjbWspOdx
HJmwY2bdBplWR7PWj9uetvL5+TnchfhStyQ8UBYp9bb1+RwyLStE7memnDRmbp5zfYKQfFEgJgYNoixVJfi2lVKKD+5xMjO8NSg7i6lHNkgQIqKRjIoAA8JFCqtExNHaelw8CcBzA7i1RqRmdvvY3X3btvf9ZmZE8ezXa1zM+P39/fv378/PV+bPcZy7g07+1o8VNuH7n3iMzfmk5c7E92ePZawjlXtL0jngnIMi1s6rnHc/+5Lkh8saizQ7as2OKWkDorZkNR4XeG6rpf+sw0UkKZ3nnrEclfOe5+MsN+a0Uk51xdMVn+3d6clMhUOKH68z5qI4109GqZDWAkwnJLMn5tmSl93mUUS2bfvy+lqKCyjCyMPJW2sBDma7T7WJIEC4FNmKXqWLFBKOiO7jrrc6enQi3M1gIcEGOXoPbEJMHOFkQAg5I0uvmV1K5dZm/w8ra2EFay01ESlVay1VpCoXBSMQLQJKAUFRUqlm0t26MzNE6ej9OHo++cPsabuAqB8NxMoeTEBsIU+X8ny9vDxdvt/6rd867l5u5pOqDKedma/XC4DjOKAKM/xQ7V7v81/4/H/TtgyHYZAafALP/yvV6XMPYa6FdDUiYs2fpJUEpIf8YyjxIvZbhs/M+VE7UETmjoCReKWziGYwRY8J01MktkzeuqmVkcFJyePRbJ0N8XmHs7+B+w0+fHe5r/fSFwBA15fH921kAVeQmmOs8vdSytFJREDBd1CiL/N8eg3zgJM9EpTlx9FYWEoBqNZK4IQcjHL5AyXomNaQzXgiwkzaOjAI5XwhM0dsnzfm5KGqiEE0er6wPHU6XjJHR7j3JIpcD3pGm76irPOtPaSvaOwfIOYcW/B5Tgjl6IIc4zf5LQf8Vetpt3sAufq7PiloBA/kxnShM/xLBE4ef7Fo5v0efjBo0yLCnjgKUCklmUzIhreUcK6ESvLoWHPOgU5MSqyqtWgp5Vq3gVJw//ufLshYGpFQHFW9Pl+2bbvdbkmSW7UkmfxWCg9V17x3j27GXRDK1+sGjB426/dKGif0C5wQAmMIUTBC6Pl6EQ866OD4qdP3j/7r23vkjJMIBhGHu+bEESJiGYZhxKAR6ejcbre2H4lhUwKDMHNIdGrazs36EYAIlZK+Xbh3s5ZNNcyZ5Uzssbl3O8EbYvrQInJYz+fe4P7oBP/mIjqb2yV7Pvu5P30LiDs557T6PsRsSFQe5ceTftrOCarzJyL3TO06yBA/QIgDSOEZAyZVw0qt1cyyvFxzauUtE0B5kLuKnAsz+68gIKXRRXnO2qBbuGPG53GiwBkvmlZULzm3XFVfXl6en58jIo4RnRJFrbW1prUwHrp5h0JQFWcRYeG8nXTRWmsZWUXAzG63W5buk8wGGPjVfFmehM7kzn1MXcPQoqkk8/lExKK4sN4lFOIUAQohgCgoY99AmBBfS73UjVsIcTZ1dHd3zFa68b6IKNtrcybQy8tL4vBnVc1v70fKxtHaUiArV5jErej9RyGJyTKNU+JvZVJmzwBnmLAQHH7fIs8rLIvpCHeruewWiD6vpm3bhJTg0Y+xutuIh3sPkju19RTjzw5rfjRLFGkNLbJhFAFv67txCobPeOBlID6toyXGNOdwrOP4LFZk0h2P1jpmG+GnD9PPGNDKh41VtZl1t33f//qXX/785z//+uuvnqVXZmY+utlut9vt4+MjNmIPYS2l9D5aLvOqGo8VERHHcS8WTTqolGTLJaPKtdbeezuMyZjZlXvv7sglgllScx8kYUUKTnEFJbUGy3YpgCfxr4cREYW3QCkkIqQCIAFvx9H21nRjB3e34fR4txbuXmpZT15Za63bttVScrn11oii9xIRHtaf+/tb2HzFqlrKFhH7vpsFsxPJKXSHu2s59byd6gaZ42YWnIgoxkv8QR6GWzUFIukSh1pWxWQXwyedH7F04/qQmBeHzfocRNZanABpACJAk/SImR/0UjLizOOffcLzh9NvaavBaW0xC33npMZZqjOAlNOAIqRSMhBlNppjuOp3o3PPxuJ/xeaB6QKnvgIRjCLC+wz/iAjU3bu11hqZgcHMpZTnp6fX19dSv6tywjh59r+JCLrl9ZNKcQnrVXkr+uXLhYhYNZdY1tVrrbfbe5jDLfygcEYgrHsnKt1NnXtAibIfjihXXAbq3hOIF04kxJHMXlrSyCH1p1vzjtZp8OABjGDm6FihSsBEaOONRFQrCe/7DgepkEceXxjO4AAziuh1K1uptRa83Zg5C5zeDUBhEZXdukYAyMbj8eDdaaJG/1e+q39p+6GHkE4fPjhC/7tCzSBRwGP2sjEGjTmpAquwfI/TGHRHgc57G7cZ9ywMiM5z531iBe9fvK+UiLla/ETPSTMhmOvxU51sxQLLXq5buucpfour7EEtnC5+BjhxPktE6O3WttKu12tvZkcrVQmxqXrfg2JTeTvevZuIRLeyQUa7CJg1kG1RZN1I5Ukut2OvtQIMD2u9atn77mEAwqM1U+XL5dIPUy5E9P72cdyaahXlnMdt9akfu7sLMTHDoyEcKIhOwSwQ2o8OkqpiezcAwkRiEY6oIiRiFISAaITByREGdpCBikktl9vxASE0YqIn3nZnNGgIpCYLXnqWzhRd3d1NFFu3jkaAwIM82GMTBaP3ozWJiI/9zXuUbauyQfFx7EeP4GaiEnyEHqS/Nn/7fju6B20glFJPjpEtS6lLPgy9d3Nj5iK6o1G6BBn9hQHBEcQwb91ilHN5dFtVPJGEKFRCFILIbjbzMAc5ITVMrWAKCo4PFSlCwiHhVfhS6FKVvRWNraDUuBSvW6u1lqKmHy9Pz73729tbKeX19bUUARrRcdmMLlSKqIZq1CqXS+3vvu/7cSCquFPv5K7E4e4lPYTTqnB3cpiHJ5UVsUhVFXjt/WitFSdlU3b9WvzG7VvcekuP+XCUsh3g/X2/PD9dVcmP6J2JXlSDtn1v7x/7y0uppCRBIX5Ed6gwER2HbZtOygeLiO7tdnw81yciopC2jwHo/XB+KhHc9oNChMp1k2527PbltYrt+7F773K9ioi3w8x6YOVKBEQwjXc2IqLWdtUvl02FqR0dSVrLXGu93W4cFFT6nJDiltmru05x8wBYACanpPLD6EQNCkJmfGbWFSPfRoAHcQz8yolZsZa6FNBZpyxo1lJA6zKUJaJpYW/NvHPdut/EO8SJ3Y4OoNaLysWNBTDr7k4IkmCiEOoIRT1CN3mSwv24veeYFB7w723bLirNw8JB1M2aQISP3kGE8GvgZXv+W/uLb7twH8GwRncrRf/w9/+mBxFTcPnl17+axc9ff2JE229Mdr1eu0NMyCRsu5TamlPfL8rRbpetXMi13S60KZzLl3/4j/8cEX//8+/e3t6OW//DH34PAxTebUyYNOv9SK9L9CmADnJx597a0frOveP2zu/v9Ov3F+Y/Xp7+8n6To33ZNuPiWhydyFXksHbc3iOshxMrVbo807VJ+/b9e3v7+vRzjeu2XfdmB/e+scf+8fa37aKlXi6bfn97h/Uq/P7t15fn69cvL/uvb7VeA9R
ENt3cCUZfrq9+O+LjjS8X7ni6arChVBLdbqPoSqMZLEnO/YjGwQ4ERSOz6OTUohV7CnKRAN2cvTvMn8Veen/ae2EVEwuC6rU74IJQpivHDqAU6RbNnJTNozWEoRJzKcxsYYe1K1+IOcyUmInafjBFJfpl/+C4SICBqiUcBbJpfTsaUUGoe2SyIlMYvR/CCQCmLG8HFOQ5TSkF3sKEBYC5CRPCh7KVoBxu4bbJNhMWC+Xu7p6o9Vw+q9wNIMEIEWF2ygamqMwBPDa5/pl59FoHklqYKaErzcN72y/bZd/x7a+/vv3i7Xb125dffv2PtdYqpaje9recMrLvO/ZWL5uQtNZ6O0Aog9hAI1xUhcndW9s9eb97UwblbJ79I3qPpvDLZm1j9is8ZDc/mjVqwRHMbeIt3ZyJShXyeObW7diofn16fbluCCM3FXz9ssE6yGvtIuzRW2vGDZcrAGUkRri1eH8/brGTaAt00BHoIb1bgAzBPdUzQggSUId2EmLq3u1SuVY1MzA14E/ff63GIlKq9PYGoKqi0ZfrK3UnJmTkrxStmZnWQmcqZBBlRMusPZgLnKK1cIgWgNCdlBCBlVYDEXl+E2e9yuPdo+fMtGwbHlmK4XcSWCQe/T+2dBMz/pwqetHe8nB380ThUevoYaNRqMxkmt+dzkeae6bN+ixuhxMqE1ozEclwKT1oosgoxDyUJTHVNJnM3L1o5ay7Gwgk2XQTaJRzNTMRCyfQnN58cuMjmAAwKNLlZQbfqWgQAXYgILOPjALoCIDt9ecnfeK3j++3Y7e4CEuRYi00PMJKb1flV7Wv1b7I/mUL+IeDg9CDHBROsB4RZB39ELiAatHn58vr0/N24TErK2MVj+M42m3XZixEVGDSWmutuUV4lEI4ejejIlzUOFlTHExB6L0dvh++g1qpIgKZDdgaqrRJSP9Aez/qtex7v+24PGndNOtS5BTm5ImMcyXO18QGMkfrF2e4x/cPFfpaioh8sxdBlKJP5n58b75/wf63/r7hCK5Wq0OsI7lpClSfdL99/N320+2Xb//Z/+X/9J//V//Zu/8q5UroBWALGCgCGgdTBz2dBkGcw5hPceP6k09W32wniYjsRSQigk/eaL7Pk0f6IQCcButMMLqFDYa/caJ1OgeIREUC5hE9Bnv3kBsQkHRi+cmshJsZr+wYjUt1s5WizAtS4sgpBkjFwIADUQohevY/MyngRBAhemQgjxFGzpr2Z4jl5yJhhOVKTeZOVRElGh2e5mZuWHN9QV5YHNR7j97GArNObpoazSy5KwZndE/z49ZaK/WScWStFZJx5yhbzZsc7zUtXzb9fwpeASQQKx7TTg+5n/PGbG0fZYHMc3wqq+J+qDy+iLS2R05WnQ+x1tHjxzwCo7uCUyUZ6sbMIKg40QDSScuk6P1wnfkEWttHxDJxKZTTgQW1VmbtbqV4n2jdrFdkLpaZax05thb3R3EO6Kdk3Dl2R6pAOCKIzzIBjM6c/DpH+ExacSVlCmIXMgonyu62SBVGFAQwUxHKmWZMVbNFEMFEVaRuuhWtUmuRrcqlaM4t3EpRVX2uzHy73V5eXr5+/fry8hLmR7tlhTlzbzkWchRAxuodEUXdkp1fe++LlHX4QKOiijEzhjholvu9t0bMTB4hYhwN/Xq9Pl1F35gch3uYm0XmsFprNw8lkxwe4C2ZskSkN6/1IlJyCKGzOJG7b9u2qhzMnH1fSwAWQC5tOeaEboyGqN5ngeX5+TntwWAVSkypxxKtBYZZYdWiYjpLHaY/wfeNcOoh+V+5fVLE65N/aYCsrQnjc+dPmafz9mnt45RXO9/IeTkA5xFAE1Y0V+vKeK0vulF+sY7ZKpS9fyWIVRwIeJxAR3kuTZRm+LZtz69PWdbo1m63m7snAuJ+ivTeInv5rPfOwPVyKTTqNmd4pLszg0Hfv3//+PjYtsybemsGjwRqpaInj5kJDiIaA7hmATNXx7YVZr1ctxS/UwXNCJ4zNmTMR0F3I4+kFigsq0lmYMK37TA73DI7TtwxIYvZtr0xv76+fv/L30CiZSulVIse45nnrbXWjmZZPCfqiPuMtUmrQTQ7Ns9vXDGy/uf85Se5ys3dEeSzny6ba8wsqAeah2SxhCbYDGvOG0NEeutB8H5AevidSnrbNjs4Zj2w9763/TiOCISP19p77116d8xB2ziX5ZNygh7U71qqfUHrT7VBPs1YwqwNLrVwNoKrxb3W7bxYePKJr7xYfn3VZDhrLH4yjR6YTS/5yvZ9v91uCdDIajDAxMrZtc4STomlz8GnEeGtk+q2bcSD7oWZhYJCKaDKfT9cUESJoqiGm1n/+HhPq3cpW/cws8N6Za7CI0XOCAtmCEAhLMF+XEp5fn76+cvry/O1gFVwqUqUPd5RqlbhiGje3H0/aPScMwmo934p9fZ0fNx2dRwGWOQAFyMukA9zZyLhrVS9llIKM+CRfYkzQnWeftjoZ+YC5n3fv3379vX11cxKKenkwx3+UFI+6zvMIbFEBB+QQWZexb0ZOs7f11fNzuK0pIjcH3aLWBHgOvvZIsDPn8wDzpUep6IKg8CUsMn19Xlwl5xrt7CsSblBZIf7Y88CMyfbTdBpfOLsKgpicLrT9/WSpnZ58fclBmgdIDWcMKvr3sdJZ6fGveoChD/aoEFtczJw+Qvz/r5///U9a3fCYhG9HUAwBRETKKsUdSvMPHqucl1HclOf6kXMtZRr3bZartftUrevX7dEWGRg4A5v/bge27Z5t94tUzA5orn3br1FMIGYIcIyuyH8hBqrwqg1PAczhqiWUoikh/Xm6b5++/UNwvWot26lFKJI3lHbP1R127ZSanT7+NgTDra3Gz0KW+qWum3R21wUrKq11oxAzGzve4eQg4KD4L35Hk/X68f3t//yv/h3/4//5r/9u59+t7s7N4XEGMFFYAKB7nSgS8HFD9CGhxcNjMQIHjGQqUFP3/jEL4rJ+TJFAZlAoBkHRu4QyfO0NPbELwDofdFhnvq5gPq46PK7K1W3NMLyi6z30+cDQTmO9oNgR0QGhDS1xLrle0VkLdWx3SOsdT08pxhgOjAxy4BxH7kZwJgNdj7gKmnmt+5jJ4YRitGBlkcxb713jjvS9G6rcjzFCVO+LuV8rcSUI1YSnpSoLRY9ux0j5SxyPsI4CBMI2TfFcXdbT95bTzRCYh1b74Az13UL64GuZ0dE0wVxInJ9QISeN5r6fQF+mLmIsKqZnzO46R+01pTLUnZDPN1h1sOP47jdbsfeu1sSTAAgKXkQPrFXr1c4LuzslAwWbMgD/RK5G7PQaClMYRXVImYIJ+tOxjRq3wziCAILCSMKkzAXYeaRTlAGAwqqRZ62ei36dN2q6tOl1qpauIgMpJlSRBTR63b58vJ6uVxa34m3JNVIxKCILNCvQAAo4ziOVL611lq1tUZDTBNSy2bs7mhhCA8Cy3wIFkHCUNUSkOZUPKR/eX9+fXkqH83dKL2xPsYPHLemNbSOKCpth6qylG+//Pn5+bmoNvMYsxB1NUEtNSBzDixOOQKcbNsSewAJhh
vSosKnrok0DBjK3yjIZ7VwKJQ7b5Odj58bTjmXs9r617c4oTFPov6wlPDo7cTJqT2vhfVXfiQtWNeTGguna6MZ4H26pHO8h+EhjUvlOU76x/0Tc76SZ0J6t+J5OkZah3xrhr7+at22bXt9fWVmIhzHsQ8K5eGUlFLczVrPOSUR1HtvjFK4lIuEJVwn4Z2JpY8wZWbm79+/e++/+93vtiJpbJhRVNNGCkYqvmrpffRFLwXYe49JT8KTGqr3/vb29uXrK+DwcHjEGLARQaVwu/XUe8/XeHp6+vXtPcxTV1eVy+Wy937cPsystQaiYMnlkAGhEv/+p5//+X/6j2YG6qXWAH//eM/wD8CYjtP9Lp/nqIam7ZkPHpNfcVimhIH53chlUfF840u6psEKYDY/uMPMB+3zgw4fiwsshTEDSKe1Lqh5qFY7bBnUpH0/L7QZqQ4VT6Q0AMmYRneQtZxNsk86fjsxg68lwI/80ssQ0L8s/5lIWiuI75XDgdDDo8I5HyESGE8jQHV3wDL3lP+c8b8TtVSfpZQMCJn1drv13kvZCsvBu6o+Pz+VmXJ1934kNp+UAYARBC/EpRQh924RsakWESJSplCVih4ISm6M4X5RIBk4lPgVfLnUr1+//vz1p8tWhFBVrlvpx+7RgdCS8wxtcwXQLpJOm2STj/v1Ulvvv3x/88Cthx4u3bsHSMG8f7QeBosIK3zJQabuBsoJRui9g1mZ4H4ch1JdonUcx/fvgzFYRCAEy1F1M/oCkPMw8hXMVosh0vmuM17yMYd68I0+Iiw+/c6TCpWmos49Um7oNNTq/MVla+g0FwcTMkojfLrDL+PHYOkuiiBypsF6DwzXbj0ZWu4j7sdGOoGPJmhkZPwHF+iHjX4rA57a/9N6iXsP1cnsnrznzGfGwucOb3xEq+8ft+/fvx/HQVKFpPdMx7gQEQUDtdbnp8u1bozMpadvRkQCjLg9PQdlvV6vr8/X62UrpRSW5220k0SEG5y8Oanq9Xpte4vY3T2KqCikQ3r3BngEm1m3EC8MZREf5Pk9vDOzEicmxlR63/e+M2sQt8Peb7f32+379/fMdtHslcjWjJ9fVUS+fPnyu9/9rkptrdF93DAREWcToOTIDSuq5hbTXalz27btYx/izSyFWIg1qLMJgd3/63//7/8P/9m/paNdtmsPckpSqKR2QPK+5Mt4fOtLEvLz34gP5yvOBMX4hHJO8SN30PxCYI6tX7Gfh2eyfUlWlsTlnH8nT51PPjiK43FGNCYpy3JshtD6mAYCjzitlLxWnvNvMInWx7emJJ8dd58tGGcrMI3t3T6uFfHJH6M5bmR9ssI3Sq817qspImxWPh/dyHuMqr33Zj3TVfMUTDpE/GjtOA4BqbJIvR+aSU6ddamSkrTgfLnpfCTzXgYGrfXT8r7bRXd38js2fXSy0ic/9fx0IqKHt2YZpmYmhmYW33LSoFuYZUY/1206st2NJtY8+X6IiEgQPS+JwCwkIBrMYQ/qO1Ottda9HT67LzAalyOCmIctH4fNItXtdrvdjuNY2YwYTtR9Wy7L+jkOwinr40VmE+ddCgGR4dAkB10EeTcDuXXAGcZizJ7VPwYpSEkYLsRVWDVqCRGRwvlDhYRwLfW6lap62cqmcrlsW9V0i7Noth9HKeXl+lRrLUL9uCHi+XLN+SqqWlSWcx/mFKjKytdaNd/ReC8jwxbu8MEhHU6gCEFYUICyPYBCAiA4MzSIGCFmJM9Ply/Pz/Lnb4VBLN2IQHO691QKRAkhYFISzXxVloBSZfps8FjLYb0CcAQsHcElumCi3nvvWQlfqkEmu0wmSuahZqNO21NcGRwjWSj0yEBAp4KhPU4wyxXCLP6YTv7Xt4jAby2l397zwQx/jgbPP9du521Fh0uD2xxsPeV5rEeLYYpi9B+OXImWhzaViPDo7gGmoJUaiNmf5t2Ngs2M745CNnp1976On1PX9v3j5eUlAx7MfsgYFeNu1hPRRCAKIx/sNSo0Eqi1lFJEVQhHC7POXLz3bdt++vKS76tupajWqiJCHk4kSqpDKuABvwtYHj8mqUA+mdbax8eHmVUVYg73zIilshSAISIgLrX6T1++fnzc3m8HkJP7oCLMlFl5cnP3Yagtk9aFiL58+fL1p59++eWXfd+3qR5ba+/v79dLnRnrEaOGsDmIhJmTRMHM3C2DW2ZGMIbXzQDIQT6nhmD0tH56p3NpcIyBq3kKmmr5xAR+MnurdB8RwiykFMZskRj6IWlhZjQC+0hAh0gOIvPJ5jdH1IomZjlOU8iJODhgWLudr3mlYz8tHJ/YTtzl8D48CndcwANkJk5GOj9fiIN12KEQBmSUp6vEswUmZ0+PC1jtPdkFdzt2JnOgNWvm4VQrpf9XWFBKqvXL5WIfbzn/0SKQRQyGqpRRxncBQC4qUoqIPJfSrPfWjXm7bLZtt6PdjqNcqplFJ0RweDJgVy0/1+3Ly+tPP/30fN0YBLdS5VK0M8I1wjJwnZ0g9LQ9uftEi3R3EmLVGu5GfDVcWn+79f0wYyaiA3y0tnvPFEBwLD1ctYwyVD7kiN576Ohw9iTegGd9tfe++KUidfSczn6P35gp7hS1vniDmBPgdX7vv7k9qF+aHbOL8JMIzHm1CYA6i9yDezB96MjBxnwvpo3NBxnPWZM/liLpFJTd1bvMh0CnJM45wZdpy4QaIYLcmQeJ5vke07eVkwLIv/aZe83D0YTkxA92LbXBvNMf7315+SMaHOBq0uZmEaRFXCwIyZViDs6emSjKl62KUFhzcQDR89piGeSkIqxaLpet1lq0qooQt333ZBHHmBvRWrMerbfuZuEBBoGYRZWY2QZXYiqr6BxFOFCFPcekMrk7O/KRkAaBPaK77x/7t7f3b7/8+v6RJ3EbnHlBzNfr9XK57G83M7tenr///fvf//GPWSbJEYiecb6waMJMnJwz78zMiOHZyhxvqNblOIBQ4cqFCeIoT+Ifx+//+Hd/fHr5n/7f/9/vf/3lv/q//9/065ceFpn5SSgvOcMJv4k7ui+KUT/87IPck2jJoXWShR/4RU+MpoEAjJNclHxVj0DO468hXKbwOPHom80JOIhIyqjcYYx9xkCCLK8Gy15HLGZRotGSy6N5NdfGCAHADDM6laNWsNv6vUf99Mt9fS2bcpb29ftaymufXHrLZok8sPJGjFGKvfu9IZ+IeeymMVET7t7DJYhpMrhgMHThRC1gEX4KYpY9I2HGPZF5X/DuzUymFcciZys5YHB4IWubzVHm7gaoqgacaMy2Pp00j5PRIIDjOFhQ58x0maQ456cpk0UmPWye2L/eB0mAraEOwKgSDPG7nzEf3CJdoMmcwcw9/Hhr421BggfpPHNprR0DeUWiQiTpe9wOmw7QveIUP0Dy1j8VvMBanGnTiHAXVYS5x4jWCZGE+3CmYInCUKGiUoQYVEBCpMQqXISKUN2kiEC4qNRaN9WielGpWqowcVTVrcpWNA1ikh/WIskEMAP+xsRXra3vzCQMzkniyFLqePLCRCCBRAQzEYJYAo4IEQ7OZm81G
GtYhAAW5OmgAQzOOTIBCoYROdHrtb6+PF3Le9ihUiKoOStxzMmQvYFhksBUHgT2Ty/PianLAeLHcSDGEMIlZmtFLQEYRg6jOfihcn5qrDezZK/h2YbU6V5DEBEWATEhSimi1HuIUNagVLV3X9DTUZJyEyVVVWEVcXc7DvzWdlYWOBUJ6WTLH6Rr7v/jX3/c+ezFrq/Mkkss1bl2yQuhGLJLJ6oe9JXVftCA6+AsSI4JZlaV1kcAmWfMHkqafl6amnvyWOCt+wl3muv9OA7MChgz86wyEUeEMwQAAzbLPinYm9S0lCJiEezOTN69t53gddOfv75u2/b9+zeKuGzXkjV38+6NQYm+2/d93/fwQRuoLNu2PT095ZXkI1LV69OFPgjA7f2jCNcisdhrBwi0B1hYjt7h/vry8vb2/vHxTwgzb2SIMJlZDmZGRHTzQIt9yqzVWr9+/fr29vbt+y+G2C7XwnKL9vHxtlVNrwUgZRailuxq9FC5XVImy6WbL3+8oFnYx7ncTXdRISIm9oFCHTmRFA5i9shqA3Aa0buaIqw7B1F262HwSPsgcemO0Gkveu/uXUTMWgaNZrZm5WWT/1nh5oMjlhytjpMxjumgLxOwoCI+w4D1lZjR4/KwYwrV/Pmg9tcFZMJoGSyfhSD/AUSUR8iA4bi1iFgV5oTJuXvrDnQH2mG9e3fs+17KVkUjgomylH3cPvrbdxFSEQCFqWjZalVlb53gREL5VplrLbXWV9GPvd2we4CEAqikVyZm7jM4oBBlbFu91O33V3z9+vK7ry+liJtxqKrUImbi3gOWF2wxvZmi7h4GM3OhMLecxGsdLB1SjsbMRdrRPQhbqelFGCHHfKhq5paYORH7cYrne+8DW9RaoFXl4zj+8pe//PGPv3+Rp4TlL8fDsnI4A8J7QEW0yI3x4Njdy4OftpUvXhI19ifG7NAeKNf8esaWj8jJH7fBQTt3GD6PR0wSxTEeI2JBEpZflHHkTJhm7TkByffr/F+0DvfdmCgeyBKJKGhMWVo7f+KHv+c7zyHfWn2n2O/kI80w8cFlmjhbcC2Xp5fn6/V6dNv3Pljnw9ydzVmiCDGD0M9js62HJc/n9BKrlm3bikhSg7lx54DwyOE7TUhFi0CmGhEMSfw5EYtAuFn6CwHjCOJgplJEpIo0wEEU++5kAmLhX/f36/W5Sv3l+/d//NM///Vv3yO4bFc73Jx6S7GTKlXr03Z9RRzW+7dv37x3b/7HP/7x+emJKFprgDMAiDCSrIGEbq1xoIrmnT7IjHmYOwjmAUtVVbWWTf/+py/vf/7n/8+f/3z56fWw/l//t/+NvDxnMzmNLJ4DIb8xh5AGN8xJZpfYALhHKfddsjkwA798xT5F3X2GiAEHcpRLzj8Mmufi9DvuX0WE8QzJ0qQSUU5OXlZmXK6MEWY0WwCWoMVMLZ2vPyJH03pM8nBmrOzdWgnr5lfmbskzEQF8B6k9/nWZkvnXmZeZWJKY1cX8XCfuEsv+zrxMvoux1iaMa0w8A9MaCZUhZu89ySFrrRE2HXqbCxY4uYCOHNvwoArz592qnf40LJw8hI55qIzLLcL7zD1PakE8wmTTDIsScvCC9zBy4rTTTy+v3Q5zjsdqZA+f3OITWYE8MFk4uYRngwqHE+j09OcIrES33htg8siE5OfI+yVh9KmSOCKilI35PZiSOTvCV3/LerD4YVtPeH1S9JSx8zu8yt0RiHDhVGEkFBFRmZhJ1VVJBUUo62JVuRAVZhUSoq3wVlEKB0iVrpW3olvRmqEJszKXIpdaVBVz1gERilRlgbmjVxZice+97cIJnSEJ4ggRdg9HcM6H8bCe6pSF0wAggiEzUUhBcASUuAAdZBHdAShRCIEis/zZqc4ger1evj5fXrZt/7iZGQXCez8I5N29Xmqa/2CoVJCjd3Oo1vfj3bttygFCRNYo7rJ9qq3bnFAyXvGnhCsSQTDeTi6iIotboucsGmYupayxM0GxLFoSSyRwt5RyHEdm0CSt+CCnNvekiv5f3s4qA6ciYUSsz0+r9TGTdPI7cdfXn8E8j+dKwryHLX2UqmX5yuupJqJiHTNtfi5SszvdORFlH1Ep5e24ufvy3mn6dqoKXprEDOGTNXupi1JKBmmeLPjzbSZRNHHy5pOICMG9h3daxV43nmDOiGitee+N2S0iwqy9XJ+eni69H94tZ2wyM8P3vnu31RcehPwKRmYqKO7Td3rv2cVQtZh2Ivr+/dullqIXudPJMhHt+85aidha70e7FH15fvrzn8XMrPWk5GeB8GjOoaHbyW2YvX3fL5eLFs5Y4na7sahIUeV9D0/Mm3eEJl8uwGYdRJFYm/nwl3VcNNw5tSHxzflwsVoszMysR/dTzwwmgeIq0S/pyrnBOdzu5AjKsONm0Y2rwJ3pjrGhiQJgZj8xVJk3kQlJGKYkfV5DKCiI0ql4gLrcY9RTS+SnhU/38Ozz8sEp3osZHy7f4lOAd15TJ08d9ywSCxERJOKeEAGQQMfWursneCwPq6rdjSzcY5FFMbB/fDBgFBToEWnfe+9X78JK7sJQ0UuVqsyMIFYSVRXiTHJlcqR2q9f6tElv3qx3x6UKX+rRmjG7IwMAJb5e6tPT9e9f9cuXl9fnS0T0luyFoOjCCW4qDnh0jzGLwiXDdU5ijyFFoAgLUgdlq7wy3Y5mjl97VwnWzQguRJN4chXrlkgACNzHpeSzSprfX3755e3t7XKtdavjLa9QRAadIiIeWGFWVneVbnmlpx62k1v44PmsgOcsTlmBZJH4YSA1ftgEA8h9F79U94ux5vT1s5jhB1Rn5MJw936vNqz9+ZG8PfVzyqefMiAZBK1ZTWMtTFcqL0DKRA/lVU0/7dMTG18PrOadOdc3ln893sWpbZKISOXydH1+ftZSLDIZNH13N8BVuRQuEowIGKkShCOCzR0CcPYIiCaWUkf3aTZ7mpXNuvXe931/f//+/v6eXbt0mk21FjgRCcNHNjBEoxS9XOrTdQMTCzx67/04CEmIqly8RMT7/v7x8ZEM+bVeL08vKruZt+4RBKai2/V6vV6uX55+33u/vd0+Pj7+4R/+wXr/4x//+PJ0ba0RRREGjKkkHTqzwBzMRKFCbTEmTEWHnFJNTESl6KalVPlPfv+H3z0/v//jn57K9vbLL/8v0H/5f/yvLy9PIHd0R7Z5OcEZHrgnBAn0m/HhozAPPyQiQKN5/vRXz7A2Z/KuT/L9Z20wL9vdQZ5ERec8Apw8uwJWcGWZxXvI3dBqSZ0DDJZx4RhJfwbFGFJLY1rRrLO7u8/oIE89vYxM6MyowV1P+LWz/qfH8O9xrT38sra7zKdjmdHKaGAa6i73MUIZliULg3PSQIQSBqvBREKbrSl2uoZSoIeTU8BnCXLkLN09JT+hcXdlxASm8Ehd7KeZvCu7z8znoGgogqxPEqeC0elw9N5VNcP1cf/DVxvgzPSofJI3fhpJ6XP4YyeUUrioEgM0x/3ds3TnmJOIcCp7DkrHWZYkmQwoGI5Oa03mJA8kcUImTlmvl0spYzaue0tvVbhk
CTsixgDGUyo6z3WWDyKKcNxdQwIwyP2Sj4iFJbthnMHEYI6iVKpuVYpEcofC41qkiChLIQhDlS5VSiERVZZa+VK4FtlUVVmYEyRXa44fHJO783EpsXt3NxvsYBTmIqppHd3IMwNHHkgWejODdRAJszAiPCITTMtykKdCiEYkAvaApHXL8e/ZIx9hBGGqTJdNXy71Dz///Pb9+9utMSitliOyU2BIQlB34z5iSouRqXX3CKT+Dh/B9tlwzj6uAGeJkz69LExuoSXM7iNJknstbUtTkDy6I8BjDbKM1kqc7HSalp4jBHpPfnMZTdK/0Zvxr2/rds4OQUwgwSeX9LzDWiBnh+MssfktJmYaAwYw88FEIxO2vh4zGhxPmDwvAIEI65ar73zG4Zcw67pCkcIjFeUysxRnA7y8vTwaTyopZsoUTCll20r23WZ7Q3gUYSHKMtwgXwVkLvbmhgDcsn6oLNtWyONyuYjS/vEOcuUaYZRz9zzbu7kIAxBVVzUzxxiSGREq8vz0lEQyEcYs27YNLdQ7cWRxGBjgDOFC6O7OvGIkvtbt+fpkbfeS+Wy+1O1jb+bRj1bThxMd5YHeb7dbQqOfn5+/7Me39xzGSLVWN+u99V5670aLxIVUBI9NC2mxlht6ZxsDGJT4iHyFd729BGAm5O+fI4iYKFlPfObjMpl7D+/TuRl2Lsl1zIgG2w0llTJm0OgtKHIoRk8cOU3f7NStFMO5jOELZwwL46CcDzZnFY2uwtYSSnSfH3jS0/f19UmHnNdX3nvvxrNj9mzsP2HIl5EC7r410Z3Bb9/3kSGdbdvjWyQEIXIggaY5dYAAtNbyBXm3sJY3cq2cAH8hFKVrLYXZ3UtVAalQKZol9DIc3nyz2mpvXXo4kbBIa2SdAeQ4ZiG+Xq/Pz89/+KIvL8+1FmvtANPo00yadc2OtObi7iRcShae87FIKoFRMIvnpE4s3QuLCm9Feu+/WARgRImNMQp3cycW6b1jlvgigolrKZd6yedca70+lculunt2drg7mHNK5gzu1yzQod/PbkMWE3JVjypijFRffIr3Zgw5ZMUHMwyI4IEz0Cs/JFq9RmfFi1OKnOegofvy/KGmR0Qw/83juDvTQ61mGKgf5tzGY8A2rcj4b+0/VxkFxofLatgMC9JM0j14zsX3OR25roAmlG4e+zGoBjIlS6c/g0UEBLFhU5iZWmtMEW7KdN3KZSvKLAQuIlxW9Z4m89wCD8PckmjGRj7rn3uCPj4+Pj5uH2/7vgtFKVq1sKBIsoUlwJSJSHNpc5BTKVqL5FRbwIXQe6+6H8qECiJmeXp5vt2O94/9yzO9Pr2SbExCJFquZtF9eoMEEVGpF/5uZrfX2y+//PL+9vbt2zdVpfC0CLPaOV7Q0DwToimTGCJ/r+pFFGGK2ASvT9eXp6u+8h+/vl6ZpPWNy/Fxa798025pvA2eyjMT+oQ7J+KUkgfhneLzIGPpD2RyPld/JBsijRrgigBx+p3XwHryiNGuELhT4g3JCcYJgT+XodNEvkTEiazE3Sa+5v4tAsAiK5TNA2FpiemzzaL+aEM93/yyCz7JwB4F+44ji9OG33D5xp8GghLACbECjLQsAOIg8EpuBtMaYR1B812ERiQlj7CKSHGPZAkY8aF1R0a6waKqOhKuMYPOiIw7/LRNwAHRZMvJYImIRElI0q0Z0qnCOKFGQQCHSEQIYU1BPPdVj+c7cnLR+0FE23YlonbLXpGSvfWfvmVmnQkLgArcCzWzl6mg9Ohr/yLCdw0zwm5yZx59iUhcq46RnRQEJoIQ9ZS3BTnIMybJAjGp1Fpr90ke4JwrYcllPLCVjGxB320aIF6ynNevLFWZyBHG7iyhJEWlFLlsdN20FhJyuLHHVkphKSxKEEZVqlVrkefKPPo9eCtSilQtSZsxp69xJghyIRSm66UCtbUmGeAJ11qt9bzVAJwCQohAWFZdmEIYzFSYEN57n7n6YdjCzHuzdlD0IAmWkczItg0OciN3ImQFUokr0yb8n/zu65//+R9bawQOYift4UQYnaZEMeL5AA0s9FavHr3tB/qo/X7st63UMyPWeYGdTexdtCZJVyZHlskc0zXhIpqOms9BcBGRw2yZRZW3bXP3fd9ba4nWc/e0PnnhJ+1ggP6Ye/7XtziVB8+3EPf07W8ckGax9H6QHwqDZw8VxEE0q31DQ01M/Wf/+CTqzmPKHJl5zoFkphnOdXe33s1MaiEWRwxcIYFBfdGKYF0JmNM1lXzmAMBkZr0fWX3N3sLMeHj0GcBHVc1SVnQDggIw5zroWLqAhQnk1imiVnq+bq01LUyB4zhqrUllkVciIkBcak021DZvX7JJQbBtG8Dbtr2+fFFVd2eVCEp5ePv+t3QpotMYmEbERKWU5kHgIqTUDSiiz9fLL29v3k22IC01vAg1673tWgsBQpx1FjNr++FPHhHX58uX+HL4wCmoqqoMTjwzIwpzBrEyQc15ZRzXO12F8YhkdngIbCJCIgjG/LmwdhIh3B3ruc84IDNA3e4sRzMHP4jUALjBNJiFSYPu1LgLo661iIgBrY0ludJtEWuo/fA3pojmbXEakQV1ycvICau5ouPkIq8g85MtX99dOyxzybPZ47ya8qTn3FDeu3lTVTDPrPRYZ8krKz1uuy0lk/xAiWUxs27Rezcbq68fDR61SHTb970oPz8/czQOMLGwXKs8VS1MZlAh70bmTFFKUU7m/7hes9c0ROrThR3Re+9u11q9D+OYYefz9fLycn15oqeLqvKBhPqO8Zg52DbLPkfvzTozqYqb5TSFlAV3T3zvy/O1WxzmzCbDhKE3+QlyuB1uB2BZIHC3nry+6PDU0SRUtro9XStqay16bKVcr1dhpK8yKlGpfyKngNKdDWLKKoAEA0TvOfYBc0ZIdhL+qE8xw62zRwGAIjI4Py8Qv2OyHlbNUqESE7JBOEEkVsbhTlU11pSPvHw2UOHkTREziOle1gDmRNZ1AcMTAzHf2WRi9HrloR5qGsNLnJHzyKnQyP9FoLfGU9dj1Ibu1nYcfy2BlYcSmr1qc2m05nSipF7/jab9tu+7u6vWIOp2JD8FM0rRUoQFoqEsPgNUIlkrmDz6fpCHtz4UFDwpIf5///H70fa2H70fYU2Fnp+21yd5v30Xggo9XQrjqQoIyu4i14CJU/BEo4iIUK1XImqtHbU+O2VihYg+nIrUl6vEBiIhyRIOl626gZnpxFoHgAO99+ulPj89/e1vf3v//nbcPj4+6uvzVaTUWmsREcrOFFEuAe/Dp0lrWFjG1F8R5R4gClfh5618fX7SZ8JxiPCXqvb+3Y7j969ftCgQ6eHN/5zyk7i3GEwM50ORMEakNDMCsOmlZDToERnmxdzJR+YOAJzvYaEvsCjIFUo0zjXw0kjpJJ9KmAdiP4h0ra4cA7Igx8IMEaQJIxLcZ5Au9wU/WLQhpSwx854iWc7IL91LUNO9n7ZvtCCK327rOGen65wi5FktQ9bAUgam15H/dHjC7uTUu5ENBSktONkX5GB6MzuOI+MWM7roFUCWio6bH8e
RcPKj982su/feB1kQkaeT1Ht20wahRIWP7nxVZWuXyyU7ImqtHv3orXBZ0+GJyD2SpaS1dsSR05NzDiZlbGCmtZhZtAYMPiURiRhTlc+5OmQR4NRzglm0vVwuVYv7qEmTyqXUJD5tzbJe391ZUEpJgm9mThRWPkfRAbJNY5D7X6UACEKzXqgex6ECp4yciYRJJR19BDOzTefm/f09KCd/sIwZymOAgczWzZX8ytuRUc1PIDzcXZi3WrxbVVaBgJhERZQREbVoKVKrioRZB8XTVp8v19v7x2UrT5erEocfQrhc9brV4m+1SK1lAIZFapGUtm1TlZyaOOKE3jvX0vZdZlIC7qIqkYRQznPCR98PZt60tOYAmDIPPWxM2BjEmaKYj3qrlQDsFhROpETKmgBgb42ZPXr0IOLC0q0j+vNTfb7Rf/rH37XWvn3sSnqznYgZYmYu6E5MIVKyG+T6/JJDLwlyuTyhOHrjOQMghUdV0zshomY9r9DnsOmZxBiL9oGnhImLUu+1KlDzi5sWUdpoS/u0XS6s5ehjBkNr7WnbUpJtULAW83a0WybRi9DlciFCa41mrcAnj4WZuUNURwH/rnKHzokTueVZc6UknxjA7h4ATriFcyB3Mpx3bRIRBs/xILfbbdOchBtaC0VoZl6JWCWL6pwtPUmVl5FO+gluZlZKFaXL5XJ8vGdd1Mz2o4Voumvp5xbRrdSI3dxE2SevhrtnJSQiSpHeeztaKeLuz8/PAS+lXK/X1DaX6+Xt7c3dL7Watarl2D9U6efXnxGJLNrc/TgOgiVhizKLyOWyRcT1enX3j4+PbSvZsrRtWz4uVV2XkQUmVe3NReRyuUTEvjeBQFiUkt4tb+26XYjo65cnVU2wsYiYdXe0tkNUKFrvrRkzh0V+5X2/ifC2bVIKhF+aBd32PpjrWu8k2lp7enpS1X40wL99+1Yv14yvLtftr3/96+VyMbN9v13b7oS6KQuVUmxvfZKO8qRKT3GVMiCvABjwCHcvIvtxpPWU8KB7OWVFlb333oy4iqhbmFneLwVKLbCYwplpRDBnDzMlipgh4WEIjni/fdTLFoHW+mEDWcrMx3Hsx0fdEmCSUzeQBugcyEUMKEwSFRCNrENG8pkOoNOAIj6xRi1bZnbvGV7Wd1momO1hub+qrvGDfQySGoTvmT/FKX2zTnF2CIAIeCqHt7c3cLlcLpfLJXOU87vk7t1tghjC3RlBQkzhrXs6spfr8+V6GThq31SrirILixAE4CrMRQlFqZSylZrjKBLSCKCHkXst9KQXM5NLMvESB1T1ernUKtcnBVnrHRS1SqYYk+08TfzeW7mIBnczULD1ImU4CSwitTd/u30gyK0riVwqM1hICN/7G4M34cQUdxATjJnZ4VQ2jYgMkrWqhb/vt6iv27aJcHopRz/++PvfJ+MUM8dxmJlWQQSYwwMiSUSbqckBaM6wMD3hfJvHISKkiozbAYzS/JyIvehkmeVUguA5IH4o4dlD+CkgXP8Mm3+lAUVLOXEzmh2qD/q8Vpi5Wfjj/IkIt8EzEfNi1tJYVzuS4BR9QjcHAFsmHefE52PWEplZitqc2HbXGBkJLQxJSjvC5xHumK9ZYAxz0jtGfTzVvDBB1vzHY8liDhEK997f39+P4yZSArAMQd2yCyqT0ao5ss+It4wiRAhg7x5h5sHM+/FRa32uT/u+/+M//uOf//zX1tr/+H6ppahegqUdfhF5/fqH//Tf/N3t+y8f3/92e/v15q0KKgc6AeDKSszCojIsSKkvT0/5iPtxvL/fCC4gYWJWN9m9DXfPPfpOUkqt1A5mUuYiKAVBk3+uRTaib0X/8Lufj5fn6EYU27Zdt+2yFRFSYuJghLe+bVfjDnOftZOMYcxbmAmxkXPEUy1fn5+uVaIf16pXVT2ciZ4v2/PTBXCYuQIjL5HB4SimxojwlyhOJTY89rQYcfJJnAIEAq9KYHqK3TAhHkhcaCCZ87NKNzl9CQCNvBchFvMpaNCiIMJTx87R36lvmZkRiVpNYTbrvEpfwEoxMEvWiM/RLTPbmFuYtzw6zDOFl7zZMdFYQ+37nX7CwqPnW7hTIXo28SU4wgeE9WQafC3DrENkaubeUrgY0ZgBCJCOqKoyad4hAJFBcqGc9BQiPjFyZtZpZLDSrtCyfxlT00TOzpTPuCW+l/5Xi931es1G/zRyidhN3v2YLSUJEPKchEBMzjZvW4iRVezBbCk2eWDzDHECuMcJmERz+1T16G5klCfvvTcR8lilCyBZzzlmda7HRLqmVreckUW+Rsllut36eAgOgjBzljfNkUZoPz5y6BmzOoxzMBoNQOrUpQGAScGfkaLr+ik8WU855Z/GVQmFEFUhIVeOIlxVGPF6vYhSKVwLMTEjRIjCny41mWCUAa/KUWsppbzoVVlUE+QgyqwEDmfWAhYGmHPFNTem4Jwdlil5AMxG0eFFdK7uwakVRGG8t4+MqFMDBJiSOASGECaqpWDW3ACIchALkREjOY3cs8MMLCQpDaQem3CU8tOVfrmW5ws3VzgF62BGJEREto2rBrMGxXEc+7GTB4Mkpxo6VHglkBiav2c+aFAoLoG/q6sHL80QPvl1n8oDyxPIAQE889QYWf9ZjTFfEwtTcn22Wi0TGxEiyc2eUroEB8wjbZufT72zqnU0e0kecBqxMilnMTs5Cp8k8NMn57v7Da9lLsMMipT44c9EHv18hJi56oUsWN525gWJOSJ8cj/yqYZJkZD+ob573LtA59liLsbbqEhgoilWZ5dbGIK7EleVUor1cBujd0illFK0FOVRPBcutT5daipfIU5fGdPKwQOgII7MQzkxJCsseUEZctB0kfiOY3QiHM0izF0aRUR0a60382DY0lHp9ilDC19rKapwZ6Aw1arauLt5t6BwD1WSnClPfi5TvLy8ZGxZa3X3ImQHeu+YkzNTRAk1oWWcqyFmLdQA4mxA8LzVTFcViQhGQh/uMvPx8ZGFPmbuzawfqZvBz0NE7zrxc+F6CqcQASNZNGKqYRSFiSImZzxxMHO23WbrRBaKzS5AlFJFq1mEM8DBAcjysQmfDUrKicyXhVUGiYiIjOg+qXR/RAStWzinLM/LzU8ElXQCji55psmvQKsFA6GqYD1a2/c9w8tSCnchMhImGxz6mYCLCBWpogQ3UkEipWHtJiJFalVWdiEU5SoFEUlhnnZtK7xtoqqN2GOgGc2sjbZbFvcqYxYusxbRbau1VqcuIkxjoqZM1hwiVmWDcbZmhZt3ABcmZSKi7hTmBoAGa87IpgfTmGkBCr/06LlgHIbIGQIAsoM9y7L5SCmYYsTeHnQcx3YZxZbBNcWc/EVQzkhvvZRVm6JZQTtLZ97UQI1OIui7gzFlIBbe7LdQG+Ngn/75Yy1ipf18NVMNSOqDZvZJMe19aMvM0kyJYpbTiG3KDuIIVGLEvS6Rz4eEMOcoJqm5r5iNOZjWOIq1CtxdTsp/6S6WuVLymgmgGaDOvB6dQKR4tDjjmZAnSiE+Jy7djqMU2Y8PdxfG0f1S6vf9m8OzQKeFk8wiwm
IQWI2NV4XULcKF1Vr/p1//6du3b3/7299+/f5+HMcH/5tb70JdyKrUcr3o5VlEv3z5qbArhaIXFoInpHgyRQcbBRHHaFVQpihyuVxenxsRjafrVEg9Hczkfezd3d+A7ZIJ6+DCqiwDuhW9az63WqpeN8YzD+JQ3UYLPVIfDslVDXPACeLhNJPCl8vlrX1Ev3HEy/Xp59eXa9Foh5BdWDZV7B+3261X/fkPv8eXV4xy3Kx/wYCOKe75IGPkqIOggYSAnuU5AGTwP4eZjPUSMAoPyqVEk13G19yIU4UwuwboPr47l0P4zNcMHCcAwb2INFw+QrI6jaXuETR0u5mRhyfIJZCZHT5BQiKCQKqKCbaiE/0EzZk0A1EzcyJxwpKczQSfphBjZQPBMUc5xOO2AkiaX4+Zj87PbS1YMzcYIniYAzopKE386LjujANBFoP2OsOwyCCEBwkseBQnlmdMJ8bOWUAbF3079lX2SS8kpvd6XtUxY84C6Zy8K4N3MF+Z5UDVeelH7wr07r0fREGU6xnrqhYCJ5YSCSaIWWceKKChklYwCSHKzkZacMHMWJ9tf8qBT365Pj3XPGnfPe3T4vU69t7juO3t49gjIpvmEMgZ0Cw8nR9f/sHKWKR8f3pQLJQDvQAag9sR2UGrrIWiEKrEtZIyX1RYoha6bFpLETIhLizKUnWrqsokVESoFiqlPF+IiHL2RBEt004UHYMoiIf7xZ2EQaIrrUgBJjAosXMjb5C6NSG2zEU0C7xxQnJHJOnXsKbunnnciKhaPGDEANuCrLj7jM3C0b2HO4er0MtT+fJcX5/qrZs3WLiBSQZQqPU9S3NSiQMOymlaRSTMyQweLk5E3Y3jPrw4LV9q5CU8ZpbJnBSS8YaEeWY69n1/2Z7u9xj3xTxT48NHSeVONNhrF5R/KYVSKqZnSYmZOZn8mJotQzAfOY7PKZJsK8FvbWul/Pj5+RTrgPQQW54DyOxuvOd9iUhpZAByZ3dPXRmLxA/IgjqSBZuwGHQiLIc3pioU0WUnVJXdldmwQj7KceiZjnRC74dZXdDxfClvb2+LdYM5h+dBRNyNY9yjFL5cLpdrbTebnIdcyggIVXnbyrZtGn27lHLZdLSsopRStpqYAnJP/s2I6HDySHTAymFRjCDe3TkA73CNk25UXS3N5u7dskJ3f78x+nZcRDYtT5lspgBCWbZa637srbe+B9hBjCoiRdRntcS89d2fnp6O42itM/P7+6/P16t5O45DqlZRIrK2I3iF6HGyIuNaPTJI5Ozn9JkgjoiTATuL1gz1zcyAQNwp1zAaLDHCTkQ8jv8BkOxcNEXIzLKASUTM1JnhA3IpWeci1AoddEUYj7S3ACIkH2MWOMet8RgF8uNa4EX6evJBYwbnPy6f8+/ncJFOjVsxzYCZLfnEYzw5pwqteXFZWo9uPZ/At2/f/vSnP+UoEZu97jg4xgz6AYaiUduhcBKEshDCrZcqtdZr3VSVCQhnmLAUYQpQuBAJI/EgDBAHR4JXwUxsAUAU7pkx0VRlqrptZVONGORMzJwMWxFh3hKISBQkEFZGZB1YeixNaDYIC0iKu2efXXAocUigiFW+XuRo5hZGicMRyyc/+/RWPJbb8HN6eO/m9bqV4zhy3ItnSDRCAsfotX6I/e6i+CMtXATMzp7bknkQxSnIfxAPupMfng/mp0DlvAkJMDCZQ2anJl9ixtMXAcZsDDwK8yPqbbQnjNhv2Fxfyp+ZQUhwuy3+j5m1WZotY8Jl+zLYO6e/xzpdic51WfMF3fXzYisltslecw/IZwzzsL6Q4yIprCFo//hAGDG1fS91NDCOPMLkCctSDMHgPtKScEpWIs8uYtr3/U9/+tO3b98jom7KgmJq1gJea/35y/Mffv7y+59/fn6+kO2Fv7xcS2VUCUYkmdh00HOqC4CEI0Su915sr/Xao3c3RDhVugiBMfJxHbgdh0fve3d38yPImZGGiZl7XJiVSylKl0u5lEvm/d09y7Eg5zHmChHx0Y/eD4oQUlWt4Mvlcr1e9X3PjBfBL0Wvl1qU2kdXDjFjt977Ea1cn/7u3/0bXGqWrobfHDZfSmrUjNBivjKesE8OBCWgZNTlOHmGTwLpgAsixtCx/GSsEEYAFLAxbzBTxhmjnICpuaIelucgHh2o6fSZTu3K473EifGE/L6EMedwZhQEIiSLYawc6wCrz4eA3rvO0HHVsUTujYs/RoMxg8b1IYFtEtKsxeunAuZcy7Rcx6JDttfxk//3Ick4HiaYWY/jSADmTMNIRnuzxDepvTzSo7dHdqn1lO10yvN2u92Q+UuW5HrxCZmIRVMxb4yIkPB0c0z/e1lPMKl+ThS1iUEf3ljCEJjX8TGjweVHEhGS7TPVDTPfiyRCRJ4vAJxZRD0RVZ9eWOo4ArDaRWmWiXt4THfIrO/dMmuLkbsON3OCmRFsXuTnIsz6nYjWmHKiYBZmInBECFMQEKZERbAJKqNwPFW5blS11FpE6Lrp5apFIRgzgqtuAhZiZVIhVVZlFRbJ2dZaRDctIpKvp0yGIaaQDGIdwai10OqmoElwKvLx/r6eGDOrciJAysaz5XK8Mnf3blQouz5pglHdXUClaHcLJ4ev/oSsF01JSB+qA1Dip41/etafv1z3bsf3nbs5Q0RVudZKHvu+mwVyaKZ5KZJJHwSaOZmZaBKIfVp1KyD0Obs0Ht8UgDNMc+QO7uVrixgtCkva05mTOeu8d0/i5plYuRvFPHxEuHczcU4nx3EaWbOWY0T8mEo+L9v8//nDiIcphfEY/p293k/bJ4fmk3MhCRBJHZ8ZwAk4jgjMZuiIiN7STchLy7KPz7GQotz2jrkwU4TcjWLwGB1YvQH5WHxknD975J5DgTOC8tFRk9nofHwOhICEpTAri4voNqb0JVRhxbHMXLgMh5WQWCML5OT54fAnb3vaqlkSPD1Sgg/nfgX/DKS3HUDZ6pSlAALOSSWoUozTS+04jR7aqhIRRChAAs6xx8TWuhNnf8a4YDPr1tqRSLmn51egvr9/A/w4jiJiZq0dopdSRCiObqKSYcU0e5+jnfihIDYQktMNHKLFtG2biPTDZ5Mti4hwbbMP6CxsRGSn+SJn8pp81+7OlEQyCCeMcVDss9m0lCLCo2ADp+xmFwHczM2DWRGaSQOMRLdHBLl+dtlPSaJ1nWuZL+40PNpHPoFIf2PJzM+XDNwdjh+eMzNTZr0zLPQAvLsR0ce+/+Uvf/nTn/707du3fd97N1bhrkQ57YppLBgul0thIsDdwFJUmNn7UZ9KrWk1hImStAnwUi5wo+BFt0VhSdkXEczEYBLIYNlBxGhBz+YOVS1FVDlcNWfs0Apc7hoPyJZqAeDCYAo/8gmoKmsREbBKxPvHHhGOQFDAiIaDtam4dwlUUhHZAUqcWfLisqgq4CRzhJVH4r5utxuLV+VT46VlHJg1HSIKxKr4fVKCSxJoxirpnWUZ+/4GR935ntr+tIJW7fFf2j4r4RlA5t/OHbxDMuPhW/cip98vYBTiEu5HmUCRkRPvex7Ep8F1RJiXUjxdk
JHfn6YKdzcpaAyhWeedzI0PIo2TnAeP4dOxjny+0wQmnB/yORpc904+C0WkRfv7cb3UqnJ0J6JuR93UOxgmOtjOHOn78njdKd7MTOhw783d2357e3t7f39nxtPT63a9RIQeXyKiKr88XX//9eWnr19er/VSgM5eGL4pOYWHdwoDQuVqloCIMfs6PHtPdHC9OCfvdGEGOFCUqaqk4+HuFh5hx3G0vrfGvR8g58nLUEsVkVLKpWgVVhmU7NlsTxwUJETKMqg4s8MCICalhDnrVtXMBKhVvRtTiLsiWPCiuoEpLAT6fP36n/7d7//dv4V30ADFUAQjlWykpp1VugzKRn0vAtnjN0t5BGBwhA5DCaLAGJWB+XlkspEmVHLhR9KdAO7ux8zdxEmWOLyf/nRfVICPgmUeZ6YyU6KE2HmKMTE4CFDM9HrEMuIjB5dEIksyT+vxrNjPQr4CFmZm1gVSOBcGMln4aeGcA5N82mvrvZPcuzPGDsQQrF5lkXJfgIBO80OrJWktyKQBdPcIZ4pscv1kw/J3P8WgK2BNt2mbPVEjbIsH8pyEdSVSbtC+wZOXbz3Ku1OYCBNmba2NQYIJm7w/1umpdKLt8THNq2WCMDt394X+gy03ZOTzT989FU88wFimK8Z4KxfJASCwOQzjbB6YCbDWeu8958lbDx+TA+4NYGf5WF4j7hm4+fqJRIiT+9gZROSGQFHeCj1VvZS4KJ4u+nrdLkUv22tRqpuWCkQza0qiIk9bDadM52c0WJWYWbjINM/Jt5HvWEAIkIcKZ6ZJChkLzfcic75C3sxl24jIva+RSpIYtZVxmaKcknO5PLnvZxFSqVW1Vkcna04xBm0TkbJaOyhbs6iTEREJBzNfIl5ftj/89PrR/a33mzuBtaonT0lRVW0eMAsKDkVJJz5kjBUZGfocKY7ZIzH12lw5k5V3PaiIcMKnVz/3ze7cB2LSCHMn4uDRS5x/dWZJFPiSW5oFJRHBxDv13kkUcAxi1M+1iDH0Y67WCCRd1Jnf5aytPi3q9c9z9PJJFH/8yl0h0Gd66fMDidMnS/JnYo3uoT75aMie/boR0cMKSQ6tdgM9TqM5X1jQ6JKfOWALH28to9AMCEGOSD8e7h7dACQUwswSercVzdB93WAEciiw1C3ArQ9AFE8aIZ67R8zyFgBgE/UxC+m+6vvRMolrZmYNkEwPMdAHP4oBCEJOGOUgKQqzJNjwQZHSAQiIVUnIEZ6zOxki1HqOKOUMpyLCWt+PvswYEZVSVh5kzMEDMSN70vy2s97t1pLt9cxpZjpWdpOZ6ZRYWe8o/5pc7a21xeMYiyb0zu2cvKNY9BhTSU5r7W7eyRM6Nci93B0xiAEfbCGw7ziOo7Xd3QAXJUQiDwmUdMQETJIMZkb5UcLnjSS4I28/faGH9O1aXEuf/1ak99mk5ucr0bn0SW7DYpGk7YhIHj9aVY4UPxGptRK8U3hBAtFTIWTT1vVy8Xa042BmLbyVWiQTQHueSJSKaFGuoiokQiQCDw4WRhXJyahBGaijDFY9J45USTL3GfaBRYilXsscI4mwsUwRtW6qbEzUGzP36AGnIKk1H4gUJZIgckeznh3Fy+MToLObCVNwgAKiTKIGeFgEhwxjl2rWM7AL6x0isl22ReNpc0bxEsjlBzCNoTEA6FQiZmao0idlOLq443OMN4UGn3JyP6RXltDG5BTAo8pNtZ7uyuhqIMIsWSBLMJ/Czrzs0WnwicOWsmpE51EB0xFJft1Uwilgi8U+s19ToB/ENeuEMQsAazHKzBNaksydSWXmMllZJ55ukrA8AMjvVaP7k0kW5Puj5ijK/+7f/dvrdfv+l4/npy8ftwZmLhzOWK8zr6poghTCe6IEiILDm/fjOL5///79+3eIfvny5eXL6+XypKq/pxdVvW71er1etlKVEIbWVLkZ9qO3diC8Fnl6erperzBubW8HuXdmyngszL2bd6OkMBfhIGYNQlCIipZLLuGUBGI+jsM8WaNahPGcplOuT5q5Ndac1YwwNwdAEAITIxGmQBgi7Q5FMMG6W+vnVBcFtlKeLvWy1eulQun3W33aLptWIrodx5d/80f83e/A4YTEBs3BYQ64w3iVB9crIiH4rCjmdu4hDIw5JYGB7/TsSV/2l8LnwsySV0QsYrD8LiyCyOe06RUX+FoXD3KzEuj5+2ksygjwViYl+3JjLOrsH8aEgOLe6T1GvYxlPZ03nwSka12fawZne4HPF5itUnOx353DB6DKtJh39WWTlmx8GJzlTR7kbXdW/NxhYPyZ0yTKb10lVv/gWUOtA2U0WGsFuawemDvY/g57ExEmqKpKWX/Nc68n0lvHBIDN+eccjIlEvZeVADgoffHhRMZ9fNynZ7qudn0es0rr7jHXwPnpL+0WEzixjiZENh+3mUkZ129mggmZSMeImDBKwNbD0URgFsGJbcO9y4ooggZH7Z1OJk/oqxNXOUSyeAKioOSDCyijqmyVXy/6dOGvl/L8VLdaClSESmWRoOAQVaZStq2UcGAMq2BNJLpQ4Y0ZRaSICgsT08i+jMHiQqwJWFQioj2MmTKxNN7+eK356pkmqUCMZst9CthYPBFh4csMmxmTbnWEQCqtp1MLibGSuai01iQHogUAz4n1BCj1p01fX7avt+1v7/Wt9SNYiCz6sEZa6Ei6ZiKl0cg06IiHx7lYE4e6muCZCAuUxX2+lkMeWTCk9KwdMAuk7rqEfGkEkCfwKqYnLRB3d/IltOPzUkUkRv5puQvscV9Bka2Dc53OKHG50YOc66R5T/mqk/bB43ZeMj9++OmTiPvQufvxA+kApNHKpsCkFraxEgfTRmSKei7DEZKJZrCEEahQRNgpFiLiCJtYuPvZST53rZgZ5uQAEWktMyrmPrhMtgjmIeH5LWG+XC593zHn1zPIQdk53LssbZtezkjkZI4pkPNdV0woo/MRjnvbt7snN0l681OngYiMHfAHymphAiwrYURgdrIpqPCIwgKVZmNsbuYj2OFBOZF2nbG3pHMckzlqvajq+/u7SNLeXplUS/ny8vr8/Pzt7Zac9Z+06FJfZ6U6Tk2kohGRs5zpxJS7oNdpFLqRuwcsfdEpu6lo79s6y5JeGrTPM3lxgvr7HNPava2IWjTtuC+uUWYQacz5tDMfEUQUeCiErrs+3+8ng6Iqn4Q//ymneVPng9CpsYpOXSV2giCe1jv5yYnPgD0xZ816RKjqtm1PT0/X6w3Ui8bRjxhzbgdHmhCn03lrR/RGAWUpQqrKiO3iIiJKqlqLFlUZ6CfPVKoSVKQI14EHLWYtIiRdenIKZyERKizClKWJHFfC7qVuOT0qGSeIQwb74CiJK4gYHGIBENW6jVVNEkRm0f1oh7EKJVJlUuopUEQIh0c3b3BQUbb1/s5ROsWYyjjaBa9Pm5zadxOvkYuCAUoYOYCT8/RJ9dGqFWCGfPmn2RYx4r3pn6y8/vkoY7fTglry8+MZz9/KaPBT5Em4pwk/7T+dnLFqlrmYf7pDvTQHLBERU0yiI8qJqSMhO9bj+MrpMscaoTERKGZA
dEvonn3uhOsexo2tarmKxn/J4IPtxx4iZhPdqqIG2EaU/AYncdnR8dyVSDekVKL0BVFVYRUAPROlceCFixqpUqiIYmpUosgYOxmk+3AWCkwEk0A7gb0WLbpUitdVnW+/v7kiWEQC55FHPO8XYaUhqZ3bMFpsAhDGnQagh1WeZ5NjMyelBV1efPb6dpOhxGVCFTcNjn0OHOAREMRWpeqyo70DMAgAE7rjeEwEZoikOIS5ZlPc2PSykCQODTINkCI4U4IAfmcRwLWmWGkGKMhYOqKgJzIBJ4x+41syaN9Yx1dt5ZYm3jNGXLZe+jmL6Hm07amYzUa/HOOtntD+p19mKQyKu3zES2kl0zI4rQEi1bATYReZzIM9iBORIFdDg61D4gHKEVmbi9sUNa3zHmRngbNTY9srNW3+WpPdm3m+xAYnaK+Tuy8f2nv+Num3jZP0+TKuq9aRQCoQkaXV2lm5tnx8PNsmTiOKTx229fv379Fg5kZsQAjIpaJC9FE3NgY2ZDY8KYIh8CiGo1MkLEorFa5RiArBRdcs1FpNoyrxhCYiKidV3ndTlUHkIQFSJSkMPh8Omnn949PJ5OpyNf3b19CwDMHCMzAXqDCdJjEQ9pW0voMREZyDAMqno4HNI4jIcDMyPTOE5Q12EYXPxCKyQT1coxblSXHUvcyHGhFey9D98bxxFAG9CCyN5q9CV+dzfb8Z3gWOe4GGrrgLoQXyJCm7RUPZukfVDbpsWcDs+/2Ji+29949qn8qq1YFLpr5A2RzWCjcDau9oHFjku5fxFENPVOl9b4pNqX6OKFW/70icq4pFt48rdLAtj1pPktXZs/sfeeKIInlN/W83JiU/sbDEDgYmC4gRkBA+AHH3xwc3Nz9+Y+pZGIpFRV1VILgJlxSBGqmeVaAjE4hpU3NitogEhBTDxZImwIPnuWmRlQRWQtxRSAAyAQMhAye41xi0oQhTGmwzilENWq5GIqKcQxDbE1IpqoqFlgZA4ABsoCqorVVMSdTQK8oD9VRUXoWe0W0yL0QQO2n9dNKLWKQsQRAKZpevbsGThiLRIYWCnobTaAJuogtL7IF+7ZWW77aAuX09jzeWdS8EuIyIGMG4c0c70JTxDVC5HbDG+7EH0KACKtE8e1gLsDtZRGQmYNjxd88rQBXgbdumfukETQ4zvbc+5V1RPe32iMvM8YEQDcocWdw7K3Up4YMHsCfsI7tmvE6BqN9vro8pDNptrfynrLhAuK/bGZkdsAIQA1o7DZZ16JAXR+xO1ezc7cyZ39y2xrtPl7rNE3RlXD2ExAkeJ9NeR1X31p3EZUVfeLcLOPYaMxJz6CNujZU21tmZjDJkDPiwtm9LTjf5Ms2wHdvGZmJHfBF61t3r2B1FpTHNuWnNMX5BFZL81qd+i+n27GajeJN5fbFzmDTpAQsZRyOp3u0sqJp/E4TVOuhWhZclbV43gcpnGaJo5DE+5+w3UeYhjSAFrRxEzQ0aycxbHNwkbvMVUbJj7Nbw9Xt//8X/3Ff/Nv//2//W//ejrw59//iCJ4KFRNUIG80tQwgTEKGoAaKDJSAGBGlaLVxMRMImmK3CZDhlDL/Pj4oDER0evXr1998/aQ6Bd/8vPvf/QZllyg0uFQTWMMYYinB2XmGOnnP//5//A3//DVN69ev3qLAQNxSmlKA9RhmaPmtdZa8vr221ODTwTHm/XspRJRLSIicRwiD3kpRHR1Nd3XWRWYOaU0EQmNKaVlmUNIiUOIBAqM1sqAe9FZrdVHuzKwTwUouYK1dJaq1toS3RBb4LAdFMjnKOde5EmegijYQtG74Igq74w8xI4ytNPf1pniCblup9EOSC0Q1zojmhczb5S35/0nevQMom4X5js+ebBdqnAv2vZffefBO4D+vUNYa7UQtxyO9ZLyrVIUO4ioqpoDDu/YdnuAC2Xvf3uFMFHtpYzWDQsksly2q7QV2fd5GAhEFJglBBNVVALUKsoCUWsuK2NASkyMmJeVB1CN7liCmjGgFmVvYHacap3n+f7+cZ7neV4eHh5McRzHIcbACaD38bk3uI3QIWKfIoOwluLUaNVOj48PDw+vX3/9/Nmzly9fHoaEiDHwNE0xoNsuyGRAIlW1EkDkUIpWlVoLAFAMcUgxxoF4LaLGVdd6mksp7g7lnEMIQ2QJUZMNyIlDTAEAllrBjGOIrWq9L/wf2XcR2WC7NwIgImKyy+3Ti6Kjs7iEXgu9nbAnTmwkupXxnAly293dbc/mqXbAJwAghMfH+XrRLHULQaKJGQak1jWIjEpKBnoerLLxxUaKW9B3zykA4GVg+M46bU+1fymALdlzJuxN2z6h+c4y5x/1V3j357bziSjGweNZgWNKLOTNO+H589tvv307DocPP/zw/ffff3yc1/W+iiGRIYjIugpC1YBjijJEAkMEDjTGlDihEZoh8N1jhdqi+fNpvT89rrnmXAVsGIaRQ0hJHDZdAAKoaoy8njKAffTRB+Pd4xdffnl9fY3dl45MSAaiiBaIdV69MxABmHEchxgjkk3TtOYcY1zyiktmZjGttYRAKYUQQq0O1Aml1HVd03jQ7rmt6/rw8ABG0zRNYxKRm5sbDGEzB9Ursftieo7QLRAOLWIN3Q/xpfZSLrs0OQCAQ0IDz5vhrghCVbGD4EufI+LwqbYrJ+mB+x2CaE+DwGWq7WybASAAp3TpZxo1A49aenCHiuHQWf3q9kN+glRhRD4D+zUSLaXAWVWdlZTsAiiws+l1l8HeyQEwO5fVuVPndqDs0FBtl0X8Tp7a/ng3INWR9mB3OQKgqdWSmXmaprev72KMtTSMq1qr9wB6jDLnXEoRq8QAxG2IDAEqVETvBCUz1erZVwzInGqtuZacMxgyUgjBJ7+j1cAhhAgtF8rjOI5pICICEmJTIUBQnee5rDkcUs7ZQEzHHppou2CKqrJheSCyQBuWYITcAs5tFzwZZdYai/wS9WEDVTFwjNEQhmEYhgFqhcDAAKpFKiAmSEAEovCdFYtuSPYHA2gDY80M8Cw5/YdbLILI0Sk3VjJQMEjDwWdztd101xSEOnbR7ucIAJhDI11R2AnG5s44jzh5eJtJCBsNYQfItX477OM9d3L+3AKzpzQO4cySOyLcEzN2dFC3c5iZmGEnNBAbus93HmfV1iXGk0Aq7ETNZlPtuQzAW1jJMxWbau6GkD9Aa771zwPIOgQoUBHUtCpQCOHRlBEwJQxcpORlVSnMnJiqrUBEAWsVZHLzpohVFeRYda05hyFNh4Oq5pwjHSRD0WpCEQdSQGVCsmqmoIBqKIpzEVKCqClIqRUBOKCPLkSyNJJIRmmIWg5Iy8CMNMssJoAWiREDtmniiIasQZGkQq21VAiBQ0jZBDggoxEpFEPJddZTTkNUkxQQwqhVSilkYQzpNC+HwxFAcykB4zTxsixLXqfD0RCUraCVvAjjEJBVGQM5wjCYU5xYrVoYaVnmIUQGW+Z8c5iIx4T57748/a8+fPlYTscxcaX79YTl4WocwxQxjRQSEJgImySywEjTpDVDPEfRiFlMiZgCAYBIERFmJQZUi
BlAkNf1T3/0/Wfj4d/9t/+v/+d/8x9//Y+/+cEPf3x7e3v7/CWnCFJKXZUER/zm1BqDicg7n9EMpA7DpFVCmBzkcAhxXVcAeCUv4+EzK6vVZb5bvvmn1wPC//a/+vlPf3AQ+22aUhxHTXEarhEJRCMV1oUl/+jjZ/+7f/6T/8u//Ydvf/N3n7z3L1UlpVDXPKUUxskQrm5vTw93j5lLKWIWIhnAYz6JlM2FiEPkUNVmDkBEuTzWx3U8Hg0EJlvW9fD82R9OX50gf3D9YuRh4gGtiGSwaoFwCnUJc57neXX+UdUsVQVyzdu4ms0Tg7kOqUUuqNSrONA4LesKBitAjKmarvlEUNGqyTJGmDWQgVbUaqyQonfkV0QEJgIUqaKgxMpcRRGVI6lKKWu1owAacUgDERSpRSWGCBVqLcGoaOVQ1dSTAEbsU1zdmwdUADNAFQQhMmIgTkF7OQCIbgao7GZnbTLGDOo2CWP31RPTdqdrgWsmDopYPaTDEABOtU5DKlXHmMaYvPgGUCtKKeswTJG46KqmhJgCKyEamMmyLBqYabAqwjWmMIyAVInsdLp78fxKpKBUHIfTwyOIgkoKzMxg1bQEjknSFKY1Wy6S1QJBBRsYGa3WUuuqIqTCKGZaRDgdK/LjSkUBI+OqlTUFTjGKQSnCWJWAAAMSp1gecxwSmC2n9f40353WV68fXr9+++bt22EYrm+OFEceooHWWhXL8FCmdJ0oishqOqsaKY5D1ozDWO4flmqHwzURP5S6wvLN737/7es3r97evffi5fX1bUpprmualQiGYWBCnWspJRKbWT1lLGswBDUFI1OFWletBhSTiIhWAhsnN5sKs5EEyQBMQgBDjIfririonAqP07Ov73K8uUYaGbjM8xDHtUjdXKBekgCA3MPztQ2LcD2uRSoDxhhZAaVsPpUXe49jADQzQSQgqDWHw5UScgiqmjA+LhmJRSSDACAGMgIpRdUQOREVyGYcLFAciZMoV0GDoKpAVCQDT5wiZEEIa4ZphGGYfv/bN598+PFHnxw0KpQc46EsZDASF4xqsBgBYESeQB6xWVbNsvR3j2HY6/4WO1Wj2Gqecde7qKpADYV8N3B5C3ZsdjYCWK3ZzLylonPb2RTQ6rFsJAJGZsewadSF1DLe1bzii7ysODqYX3ULHFG1liLvv/+8VkiRallurqfIlOdFyxJguBoHkVLXTBqUwnoqxyGhMSqThUAxhCS5rOv6dn5Va53necmrJ5GSWiS7uQqnh/sDxagnyDPjFVEIPGGk+fGNLvOz8cpMxyu4wvdyXq6urjgMimSASKGoVLUQwnhDzW9HY2ZiMFPVuuZcJM/rqahbV60ayuhYC4MxUhSRZZYQ+ebZCzMTU2Y+nU7r+uht8sVAyY4f3BTWlFBr6R1MhEagAETeo4vW5lxB7WVpbl8SgfdlVSFvqNh540AEKohn7xK0uqBkBDABVUDg4H6GSs2E3AMZwmZA1Edjozuge68MCUHZs4WErUZOVM0Ua2/V2zyiVuLTSjXayV5KQGTNv1XTM4qMmaUQpK61Vi2K6mPGKyAhsSMCUmAgVDMFx+e8CIiYgYgB1Cf+4S6ocQ64bAzl5TlbNezGI6pW2zQBUG3pAddftVYA9FlQWzIGAKh1rXUv2lUiqILGabBaP/r0w3/8p19WrUaGo9EKH338wXg4/P4PX5gZqL159c3NzU3OWasa4hhSSikCoQAK1IDMAZiMEUKIgVTlUWYEWHVZNSPyAKyAqKCqPEUiMsbEMYSgIrIuD3n1NsIQghiccqmPrbCFlwfPk9yd7hgppUQEqsrMAYEj5lqLVQ4MhDkvVtUtN8/81H4oglUzhBRiSiOHsOWuU2Qkeni8O9xcc+LD7REiGQiAAlGKXjgqAIJkQXvaGRHZwQ0NzFSk11YAAJsZtla7amK1evUTElGIkZjBGOxcjh8oNGcMuvfVSRTMAJk9ndj9OsQW1SVDyFV2NUGMhClxCEQ7iGlEjMHMIDCUjtdAiIBe2ge1Ngbo+Xw/wkieXi8l674Gm9OWXdybQzEkJ1oP9XiaAVrcxEw9zS5dniNi810R0SdIdZ1SzuPtOsujV863T5SIIidsSJ+lf+71IAG3WSAIIeIG6Inoo2ra4iA5gJejn1jwVivr8RV3M5i55qWUIpK8X9YvZmBVBmuDIkSMyAueW2bAYyrM58kNPQOgpthHNhB6U6O3YbdAV+fslsc02/Iqu4pebRWx1ZvEPD9r1oZsEpIPAFQzh/Rg5Ba622VdmuBzIsY2QLzWauIFzbhFlAFSjNHxrD3PmZeTiLRyL2h8QQ5c4eTvh2QF7+VpJ4TAhP3VQFQVCRRhXddvXn37wbODmR2Px7I8iBkQV9WBeDyMHJOUrDmzVSKItDV54iYutUcmGtmZMbPj8D0+PqaURGSe508//fi//q//D//4j//pl7/6p//wl/8DcXz53vPv//AHn3zy0YsXz47H4/E4EZZXr159++23p4c5L8XMxnG8uroqa5ZqCGaKTARAPg7kMIz3d2/evPr69Tdfvv36y8OAf/qTH/3sxz8jguAA+oYqJqIBW1Zqmqa7ZeZIf/7nf/6Pv//9r3717YsPfvf9H/5gWWep62G6YRtmWcRsvLpOcPR5kmZaVXzedIy8yzYPQ/SMq+ScwbKIvXz2/NdffjUmToH+8R9/m5iuDmPEYaBAwNXYKBtCra1lq5SiagKmqrU4vRHuQBGdqhBhLVIVuIgqmACAgmogCiGhqZiqmOPie2rCLO7SIGbW5s40+rZ9uNRAzb36TV+euWCX6GjcS8yMwYIoGiDWc6vVVpje1O+5Fu5p/mQz0LfDLqsXzt7g7tjpe/tjf+wPatUOkjuMyrY+iONmH2x3UFW3g2DXkC07HLnzqxGhNSAv7XVZwEQN1NTMgjemu2EkIqVAKcRoABo5mOhSCiLEyAA9Nt8FhaoKmNJFZa8p+bi8dV2RaV3XZS2Pc17W8vj4+ObNm6+/+dbMpmm6Ot68fPliGFJe54eHu6VmnwXDCBQTM4mWUoyZI7GYpBAiIwNOw/D+i5cB6fVXv3n16tXpdMprfbGWw9QOFzUhBNEiUhgQ0NOexExAVESKCmY0D1CR1Kq9sbahB6tqFcs5o9rV1dWUhpLL69Mpg6XbF3FIFZhjYGZFMPV6lYuE8962g53Bt1FI4ACiqgpitKEpkDmU3ybSiZCI3fdrUh+UzqEJNDXoo1w9H+tjEjykTkIef1xKDtGIqEgx1VLLhvEdgpfpSinl8fFxnteW5fPgqIiBqFUE8VQuAoJbWpdUvXHZ9uKdErsu2OXooJfAbYS9neOf7Kvj9ofINv8KsDcgITpShD/1TkrAvnLnogavtQ40mdNli+IwOBAuPn/+/IMPPnjz5m2IFPAwr3dSqogMMULk4HCFBr5T67qWpdwbmGFd87qub/PsNx9iujocvVZTSl1OD8dpOozDMAzefAGipRSpxMzTmFSVCMcUGO3N/d39/f0wjopURYljNV1yUVWLnFJKKYHI6XQSLczEMahWVa2mqhUIg7XMRVsK1+uI
iOhIdKfTCejcfx4wuGXy8uXLq6srx3MCx5rzyrG+gqC2NWMhdjSXHeWfDzvPb2iE8eSEnrE5n79L/bX4/Y6ctgnfUuuZAJ7cgamV8/gvGG7ghJcE1ZVYT5NuhAo7RsaWicft7fQMZ+1jn1vaHxG3XOL+N76TmM9vd5nTRnRL/6Lt3LpzuGeiTaqEc0sRALht3Yzp/TNs4K7YS7fa+mtbNmKWnBHx088+/vGPf/zrX//69evXwzB873vf+zf/5t988eWXX3/7zZs3bzjFTz755M2bNzFGAKFNd4MREjMbiTesMPE2QqYZx22RcZMD7vWhgYjOeXb4Cm8DOZ1OvWkC/WKXhIfIUiVjdVpaS2UEAJiOBzPAwBSA+5IBYZPVl7V7BuKK0k/AZs+DqqaUigqYcUxE9L3vfS9dX0teOQbrXvgF1RFRt8+t988/sRn2NLDF7hFRsWF6Qd8O2kkqc1jOGD3JiWaAaJ1aai374qOzENbzqNiLn+4lSNpidR04sHtxZ3LVVo2J3dLA7kIiohavxkSi4E1SuHlorVL6AhVmA3O6+ImepH1XaOzVwXa+mfk8Se0QA92Cqts0hW6ciPXUeuevswmnqnu+ANgnNfvEFzxXvCNi2MzNjXqglUk0MhuGAUxrWben9ygaAPlgEF/oLQvp7or/HVOo5dLUA6T2uOTtVQBNtW9+jt9tu+GmR7e/xWHcN6KH1qjvhvC2o25nwE6F6xn2R0VEerkabAmQPYwHoqrGGH3gwTQOiA3iLI3Dfhf3ZtC2owZi5tEyIEJxeEk0glaqGiMPw/C2li+++vK92x9g5Gm4WuZHQ1yrGVBMPI5jCKEQCioJBkYA8hGfqrpBllsfgeMGAQCoVR+ANgwxxhRCymt59epVSuNPf/yjH/zgB69fv/7iyy+/+OKL//gf/vKv/qf/cToM19dXh6vjT3/0QxEZx/GjH3wUw3A6nbyujA/k+ARbDWRe35ZSv371+y9+9/tvvv7Dero7xvDTn/3kT//kZ9MwWM3MTBRQTWqtzEjRWhGmxRgV4L333vtn/+yf/fUv/7u///u/f/bi5TQN03RdqtW1chyMhnWdKXFKk6PsWF0ZgCOFENIBEdGbOQ0RkFVy1nx7+/ybN69OJ7u9vj7E9OWrx7ev7p5dXUXm4GUMaERUkauUpdbTqfmEbXirOI4fIhuiI3F5QEeBlLBN9FpV3Y0MxJGRhtEtj9aIy02Aam+Bsl0hXOfVChBcHp1lBJ5BkjYRNgyD5PLwcLd1MzbuMPBp3fAdmpgAZNPZzcCwcy0f7FQyXErw/T+f2B8XtsjOddxf3p7EhWmX3S2U2+Wsq0bZNTfvwhmNiby2lHqJ+JMfch+bAMH1nAekRD0/uknDWitS6yXw6YJaagYZUqgEiBaH6IhZgBaGqKq11BAo+CRgFwUetFaJQAZYawUkwrhoBlEmWtYyz/PDaX08Ld++vXvz5s2yLGkajRzzMsQYzWSYRmSKzEQQUxiGIUWWkospjdG0msqYoh6ORJRSAoCHhwcK6eHu4c3dQ66ac518Sv31dQqxTCmlpFrNJDBvfT3M4DF7UTMRYzMkZC9o5BijQoNd8dhHLSUx3d7ejjH94atvTqdT5XBIqfEvInmzJaGZIMY9kWw74qKVOmD1RmnuqKuqiXclP6UxuzhU4OwQAl5oJdjprSeka4Sgu34JJsPmavZWXucBIrLuEM7+zN4LQEQGZ4OeMFAfNXSW6JdcYHZhyG4EvKnU/otEXjK9M0S+k+/293/3577z2+3DYRjOvNPPqbXyDq7G3wyaHue8Fub47PnNhx++//d///fH47GuWWVc11VEEhMTMMEY4zjEh7s3gbyzogHio6KqPn924xyd4sgx1Frnx9MqykhpGq6PV2MamqR2pVtrYEwp1VoiMw0RAATs7du3IaRqKrWueS2i8zznqqus4ziO44hotVbVyswxBd3ogYyBHPcLgawjb20rL9aRC5hqrTlnTjEx55yPx+Onn356e3uLzLAZmj2l0ASNSAe/vVhx+CMyE3bmIHTT5byP2//7CWeSJgJEk2Ib+gWAgz1usTDaCeR24S7cSB1ScpPbjnMB74jovTW1PXX/p+1P4G6wMpsRer9cC6B3c6v7PO8MZ3uHaG33vrsAytOVNLMQgsuN/cnwTuWeda91K9ndPt8kw8YRiNhLzrFnMIaf/OQnV8ebzz///Ouvv76+vv7o/Y9//ItfpHH85a/+cZ7n07o4+YkIBGBvSRQx0JEjIkJgDIwe7+9iT1VF1NAjob3Yj6G1tqrVWiWXWitY68X1ksKNcrR3YNWYfG15A9IMHEKogMiUcDAiJW6Aohyp17qbGTQlaooAqkjAFMkRF/v6ZDmDJBHRhx99BCHIfOLYS6DFthgUIkrJG3X5HXwX3+1ta+vfT0bEbRLKxgidyc4H9baRTZr5g22hcF/MjR5kA+bt98FuexBzd99t+wqqXHiw3eDnrTBKL2jX28ewH608VdVE96dt77gFdDY+3UIqZ67cdwSI8m4oxXY3/3BP/274bXB72/ps/NI5qw+M2AGs7HbkHDp048r6FEC/Z/C+atgFb7a1s+5kb32GGCICe1mET4zczndUvXVdXTOZGaASRSLnYVJVlTYuZgNxYmZTZJZW7yFnnOa97HhXKRL60ELDXFQVRMVL482txe11zvD9/o7MLA0pq7GNfzuOQ1mzanVQO6cw0YJ29vVbZKQ/y0UkBrivMiATKyM0zYmq2LpkRSkamhkLCHgDJ/Pbu7tTLrdXVxhxurrOtbDVEIcY2XPkBIYhcAyRUasRkfUJXZ4ZMPVCI2+bIgDY5ocepvHx8dEGm6ajnZbl9IAjhCE9u7n+6IP3/+xPf/rNN9+8fvNqWU4PDw9f/NOvXn/x1el02rDpmMM0TcfDdUrp4fTomXowKqW8efOm1vqoqZZstRyG9P1PP/n+J98bQ7x/e3c9TWREgGYo1YxNg5qZgs3z49VhenNa7k6nDz744Oc/f/5Pv3v97/7dv/v5L37xi5//bEjhsQJIrhYUBlFGwyq45qqKKcUQxxDp/v5+GAYAWLOK1BjRLGCY3r59O43XzDBc3/7tb37793/z65HgvRc3bFbX3MITEY1klVokP85SSmkYVohmgBwQoBal4HUFZKrSosbExAYgplIELHNAjSkltaoIBj0FzswhpBCClqemQ2dksjNdK/V2VrG6P9O5jzs21FmOmImKmQhjFajVG0bO5vIFxfbUyiae9qdt/zwT8x93AmGn3fd/7NWw2XeMfkFER9DZRLY7fFskaB8P6jfE85lte6D1ukrvUusvRUS6E5H+CSAFoP1jmJqXHCAm1YzI48jrupaygprXHaQUujLrepNwnudAEImrSoEGBi21qlYRW3J+nE9fv3r99u5BwW5fPA8hhBTFtEgtGgRMgYwwEBIIg6UAjFbzYsJDZDdVRdTR7AiNicA0DVMaplIf7x9Oat9cHY5rqWKQUrrSw1B9hi9KdDWjUmoIAQirQhuMaVgN8jIbUkojx6SqtWiW2YccEpFnHT1WHYeUpoMRCph
aC3kagpcz4bkHHZ4Q5J48dkaDI/grqnJX/9ptXGaOIYVQ3GwCO9veqqqkm7glIu2GjqCP5RQz05oRIjtkYuAQAmIxrxCRc85cRIpYzpnIu0D1bHOgggEzA5oB+yBqp0xC7lhjZ2W0Mctei2+msO6O/bLsM36bhqVe1GR7m/VyDd/59XfYCtvw5ScmxaaXL28rDi4hIiFyYE4pXN8cUwrTNN4ty7Nnz9Z1Ba0hhBgIoY2DKqUiYwghEqcwTMPgCQ0I2FBGCGuteZ2X08OyLEQ0xjCOiZl9DplX0QYkNSUwUC1avdn72bNnwzhiiLlUPi33p8f7x9PjvBLRaVke5xkRY4zjlFJKBlhLaQ4AIiEbMhgbsFqbSKYb8EGPFMQhmZnrtelqQsQQwvPnzz/77LOGorGzaG3nEWHDs7kM7X+XJ2+7lOB+o5+YMXv5/OT68310v+PNDqYOrLDdHADOIzSspSqwQ1AAwFbhvCfUTQJvX3nge7ORNjI2M+6Gpl1aumbWx7zB5hACAL7jPO81yNNXbpgZT5fRdn1Wu9Mu9M6eExGxV5kC7FjjYmvObgkCQj6dkjfLAXz86Ucv33s+n9ZxHIc45cdHJ4wvvvpyreXu7u5wONTaan3NYQkNsgERhRSpoySaWc/q9t8i8Pks2xvVuaIvb6nuHDqdbWAkWwWNY1avULxlnYg8QBYjT9O0lBJCOBikcRBjRMDGaNU7IQ0VgJCaO1drJQBEz19Vt1eJsIikYaAYxOz2+bNhGGCet3r1tv7Sp7S3LMsZvAd2MHLfub8mal17t3nxPgGqley0EnpCdBQZEAVR6JPloIXUjQEDUvPpAdG86gnNnbTm3enm2GCPubh4UvMR3Wq7gDhiy99i/yECr4E9H9Qt6kbznd0C8Z5DtxPcBN/T4RPL7QlTbA7kRvnQ+X17ho0lzawXRbdMrxcGb3fbomDYE5LvyP93wqmbO6MKACGltCWX3F9yBbDW6t3OvQayDSTcCAJ3YXjruq3Wmobgpl6HQSVAcixks6otP0tux7CxeNcpsFQrUPZzNi7eHADpnZfv4wRVFdQosBudAMB9orev8bY3ZVcfa7sX8ZN9IBj1aWDWy9kNodZKvXKs1kocrBOBnecKsIIBqJExekFwq6SOkdHBncwcX4fRaikUeD4tr968/fiDZ1VlOl6fTicewNHtzQSqorcZIDAR90bVbYnMlN2T6e+IiFtwfV1XRCRAqYUJrg6Tqrz6+mtVlasrQEUtU6Sr8fq9Z9frB8/fvppvDsd1Xed5zmtZrZzuT1/Wr6+vrt8+PAIAAns0Yp4XZl6X9ZNPP7qaxuMYP/nwvQSw3D/eHAcUBTZUsy3Sb2BqoqWsc6SDajWtjPYnP/3JN9/+f0T1P/yH/3j3+u2/+Of/7Od/8oubq+P6eLq7e7NWAoA5n9ZqomsWlCXbbLWCmJhZKRUAjFBKXZaMFFXtNM+/+uK3f/13X04Bvv+jzw0GqbyoSF5XUQNSslWtiJVSVBWAwFvu0MAICMWq48MYmCAB9FYEx6NCUFQBgwoVtRZVKTEwM3NMaNpr7CMUcLS7LW2gAA5pYwBbwAZQuWEmJVX1p3Kj2V2F7p+33tGoEUTNQBChldS+k6lr+AJm4KC4TyFknoiz/bX7E96V8nt98J1irv/6WfqUUghN1RF6aikFEDjsBnJceqG1VDQUACXcbGsE4D4yuDmE3JoNzNw7pApVa0UgGkIMoRREA0dXQwOvp/Jiy5yrmQ3DeDwe7+4a5BUiApDHlVWgkIBBKVTWJXEIxCAKpiISkDyIKyKPy3J3ejwts4JNx+P17TNVVQRFqKbFrIquJa9rvj2giKg1yD6v4ay1BqQ5l2UtIgJAOWdVIKI4jjfPn2MID/en5fXbtQjGFNIwr7mITnUgIiaMEhvciEoRVe+dYWZCMBDV0zKHNKaUIg8iYlqYI7Mg6PVhOkwTiJ6KUODr2xs+XmUO1aCalLx6Ly0HJETT7zCzvpMY9p97rRDvEOro8jBDU4YN2xrR2xD9b7FzJnxHaecQrFllEMXWdgA9qrr9053AWk0Vtj4xaPh2RA7wQGqg2pC6CMwUK4cL9Lbt2INeXC7C0w+7Im+6dVPw1MEGNiNge/L9Yu71N8B5jsXlasA8t9JNT6hvQWKpLmp6ywoQgCI1J0dNQqDb2+txSqoVyQggMqdpuL25SinUsloVJpxuhhQjI6Aanuu9cX6c/THUMOfs2cVAPA0xhTiEFtVsHrJA4FBUGClyWMviHtrhcJgOVwpYpPKwrKrz168e59M4jsiU13WtJdRQ0Y4cIjFCqEpO4wDYoBjNx8+ItAn1COzTknxkrppZCCFNo2OWOMzpRx99NF1dQQhg6jYyAOxlopvWRrt/7l2sy5Tvu47iE3tx+/8WjziLPjt31GlPLfqLMDPsrt2TYrOAL3zV7vvZE34B3NXgYatDa5kEdLAAAB/BZ90lbm5Mj6G0ldxZmXAOcMB/hno3o2u/OHa5etv/ceeNb+vTTbkzj+yv2neR7T/f+ghcfbRyYARkgsBlWWrRcRzTMATvAzLLZbm6vvrgw/di5BBoHMfT6YSIDOgniBmaVYOqFhFNQbFV9HjTKRqz93uqmAEwIYWqUnPJy0qAIiKlqKrPfPRqtQ6+KLV3lBGRKpastTjuqakqR5qXHMeBI60ARzcbkJnRiFWK9bkkiHhuduhasqqgKfUUKTPHGI1QTX/4wx+GENZ5Hp4/g1o2/2R/dGjuFkfQVvDfpM27xG9mjU33m+5hPjNwkJvWAkOACFX2duxGA+wITO8wFG3QoE9y4B5Y7dbFlnCjM3m0C5uf3/iRMLQLvfqMKfhocjRtRdcCDd56p5LMzAFVMbTWx81E8ef32OsFv/sa9vfdrzb2YtH9OvfLL26yscm6rv5HCMl2sDp/ZG4hbAuCdK4eFxHfXTKzFk5mYqYYoysYOOcTaStJQkSioOpftR9IKXkRXXtYMhAv2nFx01+jN92JODiTtboA1+q1bop2kyDnFUQPCqBfVWsVlYAkgBtGrbdNA4BhgzqSS1e71irVkAwRCMO+KdmNM2YmQh9LSES5Vo+X1Fqgtzk2Y70VwVG1EtVjELD1wABVMy+H827YDRhpl91mMqS16hdff/29zz+GMXDgdDimENeSgRzcq70ymFbVVuFkZnYOOYtZ6HEaRPRJTdSQHiDGhAjz/LgsOYU0DNP1cUDEnBeRMkYiSw+Pd/M8i5Tr6QYA1pASchklhAhGuTYaGIaJmYvouq6INI6HX/zJx5999sk4pMD44uYo67wuDylEkwxGZmQA+9prJh2HcP/4FhSe3Rxf/+4rUrm9ubqfl7zW3/72t1/8/vf/9//b+P3PP//pT37ygx/84P2XNz1FhmYmWuZ5XpaT79c8zzlXAKi1PjycXr++Z6p/9w9/H6b41Zv76xH+4p//hXH68su3GkkLrExuha+gWWuWYoZmDkWEKlpVwYMXLuAUxFPBAICgPcII1LSKmJVa51wioQFaL1D07YUNb9M2nxDFzKtr0ETV3DfossBH75yxKPbVktuH2A
wFMkNFVAMShTZ5qFHa3oA2MABTUwTeatDP0rnLoD/2yf7z7zxt0822a03xbCl06lVVOwMu+PcGADUX5rgXeZv7Zwb7an6PyaWUYhiKLv45YwvrGBD4TonvVG9EkX63KmZGTKplnld7ZtsI+CkNSwhlXRExprR9XmsVqMWk1kIqj8uMiExABqbgJa+KcCrr/cPDaZlDjMPxajoc4zip6mme7x8e1Gw6nFwyYIiloQNjsyoU0GwtgojSAFNRFe7uHh4eHt6+ubu6fTZdXYc4rPmrt2/fyt0J6S4XOxwOxSCrBcf1StXDNBF9WGtRBA4xGiqCKBgyAKn5HGBq4OlmkWBICQDu7u6KGHIcx6DDiESGqAC5iABWEWBXqE8HQL9LJE8sCSIiQPYROU2enzVQzjnnDEDbEK2NlvaWb8+NALR+dWQjRDQeZLWaVaXUSuf4ushWHmbN7CAiUP/eCIzU4VdEiEFVCc0ngJupD4EGEGLbXufJS23K+ImOh0vDd3/axdt91/+fWMz7O7cLe9pzu7OagMIW1G/at7ensRsoPWXRVwSRMa9FJA/D9N57L16+fP7FF1+kFEB9BDZItQoiRUwqEhokRXGkQdNadIVecVNrLSouV01xmqabq6vIZDvx5RhCFniIaCZEECNX5ZyzIUcgQyIm9nmuiAqmYmpAQ4yMlqnW+nhaitrhcBjHEbWNW/KtNEUfpCfSWkMVAQMzc4yBAjvigIDlnGvNh8Ph088/+9M/+8U0TcDsAyqa0dzhtc74PwBkrSCoGbKXxLCXe084Yp9e2J/wrjjtBPA0+QzYWmb3N98Iw2EKDN+hNzW7fB6/pPYSMu5IEv6QtRRoLHau4FBV3c2S9btAz4Rs1XTQ7GolIteRT3633/nMKbv3/S8Q/JaA6grivJ7v/sr+5u1yuzhBVfwOMTWUyOlqghBknl0g1JynaVSwTz755OOPP/72228Ph3FZFkT0/lQiRGWHNlEfNGHexggBXNcgIgIF0wqNR0BVS5HT6ZRP2cxyzmVZwSxGjsSm6vl2B1RPZp455xBA0rIsy7KUUkSLSBXR02mhkoGwqFSROIaQUlJRM0IxMyMjCsZ9aBuRZ4AVDEwA2FCKAiKGmEQkV7m5vf30008hRp2zp9rMo0du8ANslL+RLO22zwt/99TfFhy3PKCBY/wQXniDLoyd+4gAu3lDHcwJmpHRLtl1t6kqGOAfL4F+wmWqysSICNrAaURkGxvjVVrg0L7EAA0T/skbcQM+3X2uO7Dsek6wb4E/Z6XtJnuZsKdtuBQI+39CD/ZtU1j9KqmNibaeVbcPzxHz7qh3Q7TpR5/s4Lfa/HlVDSLmPaz+tTtpfjDFJtAJY4zekEtEG5AUIhNZCMGDK208gOP2gqcsTZXNfGjEOS5Fu1xfy5tg8/a2pdkO2/Jv2kvgrGHcqqpbjYzEzIFaLBC8OeQsxM71M7sV91VrX8kOJdbMRAV9aoyZmOITUcVk1s03dPxfH/KDwKQVfDeoIzQSUV5WAFACZYpIyICBgxKlISt8/frhy29fpQ/fz/PMGJB55BGRDFFVi9QtoKgdIwFQuc/QFZHIwUwcOwjJTNTxBCKHnPOyPDLz1XGSaqeHtwAwTVNZTyIyppimmPi6TImI1gdARDwc5MUzM1PDWiVXfXycx7HENJYi+e5BRI7H48cff/xf/cWfH49HRmCUIcW88uO91DzHQN30AGBiRkBTELQyJrp/LIxkNUfUMdKPvvfpP/z6n6Y03Nzcruu6nObf/Oa3f/c3f6eqnIZhjDc3N8+fPz9Ok4FXeBaXqsuSa61imtd6Op3mOb94fng8LT/57NPPf/B9DHx9+/x3X34DZXGJhIFRtaquIqecs9QkZAaIBuBNgKoIDIQUFKGPsiJAsBaJNkRQb9hANDVRq7WGGHxeRV4TU+uWQgNARpA98ZgZwBbdBIBW34tkAWmu2TdUuUlGzw3mvPRU/Q728yycn6bscGdD+C/20VMX+hgvRZ5dSrr/4ufn3+rSjZCszc84f6WqIQ1ossm1EIIxA0ApJUZ5YgP1AlrEjgmwpXpGoBBCLVs15/nx9kbGlhcyI6bN7gkh4LrW5fT4OC/Pbq7RdFny0OBGkIiIg0uh2iA4xGSVUsYUTvNqZmNMKTIoVBVQq1bXXHItijBcHW5unoU0qIKazUue19O8LuEuINowDNM0PZryUKMIVwtgazVVqTDHGJei85LdsH7z+u7u7s7MPrj9TFWvgF4aQYiPD6e3j4+nnF+qilkVb8rFVBKHbGYBARFF1QwpFC6FQ1CkmJIooHi4lFxc5VwM6j2Rlvo4r+Nhunr2XFOsYMZBkBRZpKlbRqwqrpDgjxhk2+fbt+bxYKJdGT9gb8ZWbXoIkdv/rRWWu/rYMCF6PmYXQzXgHjT1UPrWMehbT3DGyrcWPW2wBLqLEvq+GyqRhxPZDAgZgVWxo1NcYGxAj7A2ct0hDcC5a/eCszZEBNhZUVsDjOqFmwfvWAZPFnn7p4GaGgB40+m2/ltUZcOz2a6yVoWLHDDnjDi+fO/Fj3/8w6+//tJEhjioKoEPijDQ6ulGv4KZmAM69ryo94aLiFWpKgAQYzwej7e3tzWveV1FxJfOg3fCgUayNs5ba1WgoGDzmhWMQ3rM+ZtXr+4eHpF4Oh4oBguKRIBcbF7WdX2UqraKDMNARI7fH6jFzd0FarijS8uxUBgYQwhh6TiBx+vjD37wgx/96EfXH35Y8sym3rjKXWhgR3HYdz1Rs0ifenT7PzaKOotofNoysNHMk/3dO0Lbseco2Ela0BZ3pA4241e2irgeqN8/z554tkfd6Hk3b/NcUKq9WQsRzURMrYOXKgJsw+daTNBJ6yLX8eTVvlOPbILiiX0IPWa0dxr367z/8AlrnG/bRtvx7hwzg5Kbno3T5EiAwzhKrTFGTrHkfHt784s/+9Mvvvjim2++SSl4x4+qmiICIDWoXxHpm9DQ2dXE+3qIgpFVFckVQNfTfH//WOYVEUFURGIIiVMK0cxub258Qr2boCEECoiItnIpZV3XUkq1qirzuszrac5zLXmeKSRWHBGhEJjZSpWIuLMD9EoE8WYJA3DZheg75YkNRPzo00/SOAJRjFFzbvkhbCmJ/apuMnzz1Z+Il3c3q52GAO46ck9HGxgCqimo+3Uo1hCS/DcdrhoREGAblk7UMES0OQJPyAx2GWPYqSpyoCY9i0fbIuzYeX8XcQMHs229sgYABKQAhKDShb/7yNYcYpGLlp+NOHc9fufkp6pGDvsWvnM8fdeJs3fYWrD9nCE4j4vf2tT377vx9cbx/s9dz6dJH+TOzEFEqroPpY6ooaprrmddhdA7Q7ErdPEVl53D40+DZKqW8xIgAqhjErSlIQQ5NxC7sPbn8HbVxEwYNuqBS7GlqmTnXBNuy1dlW31E1A5DehhHIvLZL9Ali/+u5wocC4oERESQQkPvZAAoOYsIkZmxYYv/xBixyykiErC4YxWzNmUlpSAiuZTtsa175GgbcPbZyw1jUoSHBb5+c/fee+/dv3n78upoZrfXR0RQ1bWWm
ouYhhBiHBQcSVbbkhKiApIBE4oCANKeIWFeTkMaQ6jzfAJx4AfwofbXx4NPKgXQNI04DWaWbq8dchOZpNpSaq3VgAAVH63U/Pbt3Zs3b4bp+Nlnn/7Lf/kvn0dghiHFWu3+7hWajEMoS0H0zg4AACIERgMQUMyrAk4xoNlX3371/Ob6cHX86KOPUuS/+p//7us//P76+vb9l++lEN7GeyKqKKWUh7u79XTyRGtkTClZQw4MwzQCAN3Eq6vvH4/HUsrVzdFMBIQCVanXKf0uLwqmmpCRQ4A2whxLNaoCHf5VgQy51ZMwoarPtrGdXhZANlRDs47jC+ZF7yplXdechxTZRQxRICLoMRgMjjaD0AubyXxHDE2IiFgjRv+hvcWgHbFjU+1ERA6EXquI+IQMAEBkROmztRvs03bspcz+k3fPgXeOJ2L3XSmMPWItXUhvOthfx3r+Hx2WYGeaU8/8N0Zu4s8awrCoiCi7U8cErRvB/T0zM0XCsxil0Apri1SCYWtTpMAUAqzLupZ5Xt978Vy1zo+PPA4b4rOYNQBZtErotWhmkiCtJatVbOUrDj5kyzKrKoQYAdM0jodDHIZatEhN45BUaq1znpdlAcRxHO3mCkOhWI1qCLCuNecclnpzc/Nwyq+/eT3Ps4jc398D0MuXL9eSzTCk4eX778U0fP311w8PpyJ6d5oFqZgyMzGmWr1kNAK5D1BNgTCIDGmiAFVWCnUYhhDTMIzM9fHxUVXvH++991sFwjSEFCWmUqqAVVBDBkLqZRQMKLvWuD0h7TMke3PNQ26qCqrWJq4pcCMzh91LaTRDz29vsVUzH47c83zgqDYbJ6oKCABoJgvMTBBoG1LvNIzoA4DqLq9OxGBV1UvnmhLhgNoqWYi8iA7b4D3mc5/8k8NfkJmdPTdRvymjs+4D8EyaNIjXBvRXax2GpGdYDtgUNhHvjBnbaHt/c0T0xKeZneZ5ExHbM2yR6d2DuQQgABnHqafLxu//4Hv/4X/6y7vXb+4f36YQx3FkZgRFiGBSRBzeLIbIDnCqNYsUldUjcXlFxBiGMIRxHIdpKqWUWgGpDho5ZFFei7ClIERUpM7zXEQ4RkBacskqHO3Vm7vf/+HL07xg4OEwAbLQahUFqmPFZDVZ1qXKlTEzJ2N3ipmJDAEAA5NKKcXDcxwDFyKiLNXLU589f/79H3z+Z3/2Z8fbW1gW5OawERGwF43u1tCgITTvtuKJfdKWd4tV78IEAG06xX4LNhNiv7t4zhu8k2HeDQ1rx27e9FYQhoieP7HajOb9421pjS0Wb71gyjYAxnacAfzMTNqcQHIhXIowsxFttqCqeRuuOyD0XQ7hk/d9ojJgB3W2PQZeZjm2V38Sytl/vme37f+OC+pAG3j2T+CMy1/zsiwAAClAzcgMoHGIj6f5008//enPfvKrX/1qGAaioGYq4gPfA7MRG3ERDdiqYdhL3hSqSgrRa0x9kWvVh/vT3d0dVRhjijEOcbw6HG6vb2JkFbm9uh6GYRxHHz7R9JcKoNgw1HGsKsAIoI/z6fXDmzd3uhSLhIzIiEwQEAygqjACYfSeFQWvO0EtPRjHEZmZ2vSpajqEEMfhe9/7njf3hpTEtAXQt52C3pIXIqh6Vs2xHH2bQghgYG3cEDQTHaCnUlsZkfi35HVNTa6bhzDMBCxY20oGQMcpafMDm0fU6McLaQBQz3WhtINacGb3ZKSIbDaYaVUTNQUzMvDGWwIgr2wXkU6KzanuIlRV0dqLnHnWunA4x74bL++JHM/hwrND2Iy64CDBTufcMT+Vg6tNMTAiRjJTFS2bSD9TOJF7m1tx4vaE9F2ZSb9wXdfOEWcuI6LgaGwxkpmt6zpNE3bQz1LKaZmHGLZmzcBYSmnd5Ofh3AgAIbQJFntFyMxGAdhLMMkCmpgL6xBCIiLmIcRaWxKMo3Fozc3MDC0FbyJK5CUotJU+M/OaFawGZlOFbkMT0TiOXu/ElxFTRExpwDbekCIToBLR4XAwUGDzqKffxDMzqacXMDSbEgqIiFgLEmPgAchdYqZQRavIFpe1DixeJAfH/FGKjCJWRWKMuYICYrJf/eYPP/3JT97/8PM638UhuWxFpEA8xCAGyMEQKIYqBQWHtgsGhIFCrXWMARFFCiON46gqy7IQ4ryczGwaBvfsxyEQUWqQVug1SMSNc8hsGpMh1KIAdQAEoFyFAQyklGUceBji7fX00ccfLOtpqXo8HtecpeRhDHnOS17HMda8cuRhTFplkRUXj0upSfXIkazr1WEysjTEA/K//lf/4qP3P/h///v//ttv3x7TCEQDOwolXU9XiEhoIQRGY8ZpmrZmV2BCYGrtdTFHH7ghhAQGIYbfvH2dOCwCVWUpOmedc52XJVcxIGI2s9qQCFrwPEuF2iLZhuCS0RkdpAq0RHwRYzNGVtWcM6FJLqA2pUFqXszAJIRUq661hNlS5DQNxC2Li8CoQgSllHVZMcRlWcI4/f84+9cvSZIrPwy8DzNzj4jMrCp0o4EBBsTMSCORQy2lw/9+9+xn7kMftHukQ4nEgEPMAxhgGt1VlZkR7mZ2790P18zCIrJ6SK0fnEZUpIc/zO778bujTTfn3Lg0hFLaBplZjFFyzTm7hzy6prv6R/U2B7SWXzHCDtYkt8MbYAp1D86FyVIZjDMr72FYwI2t2QzYWiuHKNYQQUopiAcRMRGMIcaYUsItI2KI0UVHzvl0OL68vDCzAuaaowvBqYzcj71WFwiRk6rFuFT3NrUCEDN7PCilFJdUa/XCsn3f63EFgG3bjOjh6XHb91zluKbLGbNUk5pzXtclxRRjJLRac62V0JjZexDR1JRFoagNuFdgqmpFhUI8HR8du4JjAEIKAQCqihoakJhtpf72j99tRhAPyotq9qme0ezzP3677/v5kr/77tOnT9+nlL755huNASjEEFQ1bzun+P6rryl+/vz586eXZyPEwCR1L1lV1nU9Ho9BgWNAxCICAFFNFIiDqh6Px8DRq/rzvpdS1nVlzYgYY+SQ0nLYaq0AWe287XRa9pyXh3c558PhUKuEyFq+nFJ2lLyhCLG798xca0YERkZ0fC+MMZiZ1Guko1ZVFebrWOq4HhCiWgM3Pm87EhKxgldzkNfe55wXZlNNMYbQ0juuHQfgkG8WwBJCAEAz7IUwmFKySq+vr4yh1h0wYxAi8ijQMJStpweHap9N9vEnP2gKnNut3zh09sgDeHuwF3TJBK83utmvXsCtHTxz4nAyB0viyLQ4EBARtU6HAuCj9TjnPcYIYIz8s5/99K/+6l/+3/+v/7cFHy3Gbduq5MOyqgigPp0ezZrXWtQYjTnGgFLtXM7P51ep9vD0eDgekcJ52+n55fx6AQMCzFUCKiJuuViEy74xsxgUs13MNBtQNeVl/c3f/t1vf/ePey3Luh7CEtdDjNEqY87FdEE0LrJt520XvezV1rQcjlV1DUGXyDHGGOOSgpuDRWrsI/WqKSI+Pj5+/eMf/+IXv/jq6w+Hw8Htv7Aso9ZAJ2CSIYRNrUoZBpN2Wwjmejkiu5WKLhCb
2do3blDL2NBZtPZzptMAxoTuRhgGpmqT0B504t+QNdtIR71uN6PtNhro/xyVxuMc6gaPdKzBmQVc4Tq/d6obOWcTkR7Lu/Zuzes2KHbICg+Oc39mb553pEOiVgg9ng0AROqg7VkrvS2Bm7VYM9SnkgHslQ4551YXtm0cAhgAKnBclhjCwy9/+cs/+/Nffv/dRzMTg10MCJYQRex8ueRSDmtqjobaJtXMas1aq4ipgIgaUi7l9fVy2S5mCCLhEJ4eHh9Pp9PheFwPKaVIbFoP62Fd1/bkombCZmtMAFAYS4EsuUgFK4dIcjy8T49xjSFFH7ECJkzk807V69pMzRpaqSIAk/fCiohZM/IxLp+fX//H//Yv3334EQKDmIjwYTGpplZ9YlPHCQfAum2DeGa69ZF3s5HQSJTbXnj2ioEQEUSVFNt8LhjFRAEQ5yTkcFRiBG2hWL+dlIKtspT5FpfL7xtj60OhEBixllJrRZEY2B05qyK9nQG9t7ZfJExlyY0RvD9wkvmqimp1avSF7jTSlG8c63AT8iBrFGNW4VpxU/pLDQZsdmYprl69DGQwOyKm2PJq7pdp71wbDNIc486Ao1lj5KJFy+y4hWVZLpcLIqpcqzfdtm4uEAXT4q9UDJfVUZUYsZcTwNUI8G+YOQRCxBBCyWZm0t1HIPP0iGoVQWobLCbq0xSwFz/M3O6rg1436lr5ChlwDYJqTw+6T8jMAARGKE04ppS2bSvbrirEgKbUMYXUFNTAqoEnCVREKlbtwJE+H8CLFck7sTowvaoB2dyyYmbMEdt4HNLax1MCEyEzUei9i4AV0cSg6D/+8fOf/fTrdHyUuqsAQhUpNWcxMCQ1VaHYWwcVAEA7lH8L/XqRMVqTmz751MxAvLsZoXWnQ62ZRgM9mhNKIHbQUu/PNAMzrTXv+4YkIjmXXQ1ChBAp5/Pnz3B6SPqS3ZsC1FL2mst5PzMAXoKAkan1QKOZpVIFLOdaShEA5jUFprhWgJ//9Ju//PM/g/prkB0lPj6c1nWNR1pTMrOad0I0UzIIkc7nMzCb6xUOBJEwUnA2MDMg9fmoplVKKVuVXTgX3MWySNGmtITaxLvRiq1AYOCDgF0FOx12rUgEaKRt6nsTD2YEhIYtIphNhQBjjC97FRHqF0CmEBiARIrXmSQOIVCIjHwd4Dn0351uG2p1tjFGgIeZ0YtVkIu6cdMLjQzuAKl+6JgthiHi3/5qtm+++P34SWcHBsHBpKrezmYAV0t3vo6ID33qyB/dwmiDRn3YzoS4peAE1tqhHfKbmaX3/YK3QxCioiIrwF5yShE7zjUAlFIICgAEdtBLUy8EUS1SAzEQFqmwt2lMwASEQBpSPKyn9Xhw9g8hvLy81FqRaVkW5KhAVosBfnrdin28ZDseP2MHD4wxhsDn83k/Xy7bbhSX4+N6ekqHQ4hu1gMyIROHEFOK60Ix7LWUTx9TSjEycShVPr88Rwy0t6ELxLyYZVVEFJGiUlSoT631/6782AzcEOOSDDGrFTMhULSihl6oXKsJAPIYDH1HFT9EV65BqEOOD5K4E+8wl3iYOpG4tnAY9xB8akx13maYEzve4WLQUMRBwThGMhJt89+doraSCU9mVdVjxCYirWvOs+rDxAEzA1OBN3Dq48m7jmr+59z0243da07Pwwd3VpRN3S8wNczPhvu8XNR7RebA67Cz+5XHr9pnUwBsEFzT82vPKgEink6HP/3Tn/3VX/3Vb371D446gx2hbmm1PF3AARqCKlz27fX8er7sxDEd1vV4gsC5SrlciqiZoagSUFEiUSRAQ7Xn8x5CMMKqJoC56F4u55z//rf/x8fnz5ctp8MaVqyAKAKInGJAOBBxEKCihsXIRD5+flnTlutRjnY6riFQBFDVfa9FqoJBLyIQMAaIMXIIx+Px8fHx8emJ1hXQExX3uW7sKJ3XfXHCMDBRb0tp519dv1aSYWZXmlYDU+ilcfOgLDDzVLDXusJw47txDL1ibuYjT1fO9OO3aTeza1tOk/jYyomh4Y9eSW6+8t27OxoF9IB+VYfPBvX/YRvKSL28lppuw9mZpSkPeccXcIuF6DVv1gu4xmmzMT2uSdTACMcbaYdjdVvZ6/q8UcrvINPYM0ABax7F3ZgKtx9BDWPMWwlmITIz/8mf/OQv/uIv/t//9D9br0iquexVPGIkBufLvpgmSCF0a5iCEuxFVFWLlNJDh0DI9O7x4XQ6PR4fDsuyxBSJIxICrGlhQCu1+f3UUotweWVmZCAVK7nUjcFOh0Na2JiATBFUq5p3LAIdPJdgVlGhIqIYKlDJmaiVtnZbBhDxfLn86OuvvvmTnx6PR0wRYgTTkjO54zQWH1rmwDv5bcjIbpOMkvVBSW0qSaerQKRtfwkIHanFNaypWYclTxQYycBo1GGqQRUHr72W/Pa0PPYq7hGdmeUqduYaMhkMGFA7fRJcAW/sbdIPgFuPrs0XB4DEwYOKMOk+Zp4RiW/l7fUWNhr0pgJ1nNATdCrVxp4DdJXt4DRX5lIppcyKBm/biUclpjaQwvZ43EFlZntPVYObTUwRvqTgfcazUbcwWpzgmrUcdT7TflzxgkU8nggEwUygdSECEXjtgWgxs1prKTtWBNCQkqpCB8kBAAcpbRZYD4sOEFEtVVWtinGXh0xIVFWCSItaqc1P2PcVCN096qa+2YiFmJk/p0oF9LrZXs4EhmbMgTCgg1JCK69l5mpsSIZmCIEj0QhcoSkCASJ7AQMQKyDFCJzUSgX7+999+82Hr57eP22fv1UFMJFSTdQMjNHcB2neCvYtNEQDYkISMEYlIhMdATwwA9HadqS9LABU02hIPn1LGwiHGQDUWjVnj0VRUbls55fX8+t2yWVXrcQxxqAo234WLXhWM00pLcviCatS98vlsqZlF7nkHELAnr9l5lhyjBEockIRraL1fAbOFNPpsH7z1fvf/sPy+rLhQY+HeDwe1iOfHo5oUPMWiEMgVAPUTwxEpABmKKZmxRShAqVErg29NFesFClZdtGL6KVCrh0nyZm2DWZAM9A+un1wl3mvoJo7/3Crp133dtmhiD4juKoENAsExgFYPYlp6FPRSwAjgpxzjNHamPpuYtqU7+9h/jsL8hp7QwQw5oBOF0Sj/7/Zo6Pazm6eGb50jLvcGaPj/Ldibnz+oWPc2h/JcARQPPIEdCtP735LLdjdTAciisTQGjJhOJbzIxEFMbXe7MEU1QtmwKpKrer2rBpU1cuWHx5OcT1IuZDCui4Du9zLvwUBRKr6bAk5rgtVynUPVJZlWWJCRGYDgBiWh4cHRyc30BACBg4hLLBUBaBSVcVMwTid9grffX59vmS3fhgphEAMl5fXnDdiXA7H49PjejyGtHCoiKhA6H3SUUOK67oa4fl83nIWMMWIrcJKEi8uJBUhhLBUoR0BIKWEF0TEQ1pSSlKrV4s9HFcf94AxGYdMIGDAJIqoikyALXhvAtha8W+corv/ztQLAC7CzbDX/3rMTlMMflqtXhEH6Dg3zU4wVQVsGDbu3ssE2kZEPinXu4OaI9cFeyP
j3itCbXCCx1yDtK7GBvJORiEErTeU7JwGhleYtH7AbWZmPA90o2Reh7FWo/DEr3+niYeDN1/h7UX8GBbDCBVJr0mZmentRQCb/Kc2n6pHtbEuy/KLX/yi5vz9H75/fX0tAhSwiBCgxlhqk5ZK4BMFay7Pz8+fP39WsOPxeHx4oBjyXl8ul1IKA6eUCOwQk/c2iJEa5goRAbkgchHZS3m+nD89vzy/vn78/EmBltPD+/dPp8eHVn2DaCaAFOJC0SgtGCIvRUTUPqraed9UVa2aCWiNMcbUQtL+7tLfXc0k5/P5/Ho5r6/LQTUsC/QqpLciCImsF2oSESC26jUicM9tInJf7qtf16Tudeg83ArMIaUdOBE6pij2EN7YuDuROH8ehGFm0Ftd25N0PK07+hmXnS817jJISzrKJTN74/f0AM2QGLmagYN/95zjfZvB1nfk7km0t0VZ97iwx9RU6xzrHIXW/vyz5B9qcbzvdeOG1Ydt4ETr9WojawFGOggAwEik1B1QEx9qze/fv/9X/+pf/epXv/qnP3yLrdkYailERBhEBK8Zy4CefELpas7cGaOYkjFHAbEHiEtMTARqWqogKSATKAKqVWlmGxlpqTVn2rcsUlWqagVRxrjGuKZzMQERADMhROsY4qUaoI5uEew1gUVqpMjk3ULY8NqQq+Zf/OIXP//5z8OytMSCN010fwSgQcKYWevbnvZxFkfz+o8TVMWjJdD7OMFD8C7ozLiXWjsU3N2VYdRB9F2+CxMkH7A7ttjRQb3v1MxM3W1EbwcGtGmcSSsIn+j2tkQcAED0GhDUURoAQOF2Pp5PRfZ0Qj95MktuUoszV9ptKHD8dWZw7sgm1gMxY2usY6BM17lxjEcLrrQp8T04Pqab4tQyDRBERjSFKDBQ06My2Vu+jX3VrkkMB+lXVW9O67sIqlUVzcSMq7osBQCPuAsBI5pXuBCBKjKagZheIQGbO9iAXoiZY4xaqv9l+NOllAG47GMIhwHtfVXUstUtwyAiMcaGN9iipN2yNMVWUtzUSdtIac9j1kcpOWx9h58DAEMyvY3UGpmiopqC1xqCmJKKIJEKswoIGoBQSIJBUQ35H//pu9//8fsPjw/io5OIOEYEMNECpA3qSchBhL2IEyz0sOC0/VdLiIisjfoYIrIbTISKhGZi6nUo/gq11rzXLFVEcpXnl+fPL6/7XmJKT08HCvzyfK61fvr0kZk/5fO+7yklL+NcT8dlSbnaLjtv+/MlrzGBmReyppRWoJgwJaqGe5FcqyEhUVjWEKtISRz2SABaaz6fXzgkrTGFAITHJR0PCyKC6RKCC69c65ZzVREw02rCwBAIEGmXUlRL1WpQAStgBauAasZgAF7U3huTbpNjOIKrLQ3XvQ4KQ3x4CpEBEC14ORo2xFImUKTikJdAbqKXUnJGQkthaHf13ihsMJn9yrOQelNc1BiYyEx88Nr4laqKw+21J8Qh88wMgMzuMx6Nfbqg+WeskPm4O+3+JwMM5NZJmH/l5Or1jbOVgNfjKkxdG7VJaF5TMdIy3l6o48lJVbwcEUMTVl44UKQSAYKKCpp+fn159/5hSYmgEkXRqi9DWCNzBAAzcFTYUkqJkaiaGYS4Mntdd61bTCszU+AhzUVkCVETKKCUAkTMHOICAB7Sy1s9X4q/XfCXirzvGxGt6/r4cDoejyExkDlSlJkQEQYCJYrEKYYQDCDEaCC5llqrqhCRVDLwQVlARHvOFAMifgihSM05byWvpThIr5nC6ciMyBE4bGa7qIUUl9X2jMRMHGNc15U9yKUN5QUmPTdI9C2pvCEn6mHat45T23KaysnADR3HGhAd1mEIIXBEI9VmjlqPjvnFzKxWQZ2M5qnMDLTFXK9alklBsJmMzWwBROtjjWaC92+8bWMIfCLyTorRoH9H6vam1HPmjhEnHtT+dhm7/DEEQEKiFo0a6nuUA93yKfYiIkAkA3V0YkeLGI+HZB8+fIA///Nf/Nmvf/WrX50/fkY05ogGodazGQOaRWEDqTnn/bK9vH4+n8+HxydeDpSWXPV1z6973fdca2XAFOLpcBBDg1DVNiyIyKbMrEC5lpfXy6eX548vr+ftktZlXdfju4fD01NIySF+Tc2RvQ2JgYHMIHAo1SyEuO/bdnnd94tpBiuBnkJgb9tm5qpa21iX1OrpzRoop784E6SINX9BfHVitdGMemvY3Rlh3dnoY9N+wPWad/96o25NodvKb3oO26E2Kutuvveq1El4dke03ct6fHBcc/Av9HzyeM47RfOWhsdnug4Qvzp7d+fP38x2sPamLAAQ0YYf3nsZzKyU4kOG75hunDYrC/+v194PThlvNIugeek8oOO/mL8vRVKKACiSL5dLSuvPfvbTf/2v//W/+/j/yJdcqyIyMnhDGQAgmIKpoaojxKqKVRXseeUQAhHE0BTxcq6RWErdcym4lRA3RkZaQuSAPpmaiMxkL3nf90OpIlJELND6sB6PBwycS9n3vYKAdw1wCMyOgqtWEciADEC0gXqIGlGgVkDF2sZiMREdj8dv/uSnh6cnMIVam+fGPEUoOta3mgFQx3y8boEBA/pARZdlnYScIczMR726HTVodYBHXpU9AWi5mZs3CAbpKszhi8dEnPN/3Ym4cqLzLPTnG/zSgteDgFtoRqa5i35B7gAQ3W90la/6JfjQt//EHstwoV2ljtecpYp173eIHf/njCbtQXZEdPOmX+eGhWddQ0Q+kcjv0gVRc3n8m2Bm7mV6oQ4AAF2NMO3outPv7z/P/Ea92P16V6sKsbleDWFfAPq8B0AKmFKIJZpZiK1F0CbwiRhjiBRCKFVErhEjf4zD4eA5U38G7X3SKaWm9QERsXq0TBVtLIchKECbisaBzMRuX7AaEFHtznRcEjMj2l4EiImlNgiD1p9digihNgfAilQwx772OpPGL6pajU3FCmXly5bNLB7Sx5dPv/mHf/zmw7tHZFWhwAsHRKx7bvehQCRdkA3141EEF9Bze7pjqbtRdQPr2lxlZaYGiAJgqjBGZmSzLZd9y+d9O5+382U/Ho+Pj+/SujBFjs/Pz88+v55Unp+fG9swffjw4euvv+4phcq0Jw6gJlJSCIfDgTEwF8QXMRURI1jXdTms+/lS7fJ62SkG719FxBB0WaGUAlpNVGJRjYTgjr2/OcfAKeRSs1Qz23WPsA6veMtaRI1YgURRwAFDzDw3BybgU9TFOg5nEwZDMoxP7cNV+zIgIjBRRFhiJBCHNI7MjCC4aXE0bwigSkPdWo94emEiEhEjIRIH0j6h3t4k9G2KkHXldzUvfOOuMgWv6b5JvMIPHfYleTo+3ElhmyJYd38dwn1UN0EXgqbe1XtFZfTIYpvvNLmCAWl3UJkqhiCEpZTSQ05MkeDaDH33/H53VfVYiUslcCB7T5kCA9Wc6/PLy+tli3y0hhhHRcWkmBkgO2ayEAAoMXigUAwYKS7LejymZVXVnDcvx88516qBWA0cptz7YliNzUJckETAMGdEVfSko5hZ5BBT3XcDk4eH47vHh9NpSQs5mnU1BV
BDA4YAJELMzJE40IopRCqlVFVFUwQ1zfsFAKypT+XKSVNKaffWQS77vud1BwARr2VaOUaOoRjkUrYqFgIShrgIIlMAgMBMBoysBHyNDN7kAd6aX+M0YmYkUkEw7yANwetEaIQRCSORj5dH6vBLNiUlPGYHt6pORKCPm6qNxNSBo2ut3KPZI1THzLWKASIyUfAcY4+VNlzgJj8drAWIQrq1MAAAzSClpKqel+1WUEt3zNzaCfDa7zQ0ZteSV1TtWbEOFT5+1Yz7XgsN4CM0uk8yLfgwuBHRlKwhurOHX6EhuCL0vE0p1XOMT09P/+Z//B9ez8+/Oj/vNUdVolCkIiLHBZDVcM/l9fW8vZ5LKQaEMUGIFbiYCgYMEaqUvW617EWLaMly2fMSU+KAACiVmI1wz/Xz68vreasqECKlhZeV0qLExVRNjZAR4nIQEVGoPspeW18gp5hMpWaPhtRazJQJvDyvqvqHEEIuJcSYcz6cjo+Pj+/fv394eFAEqxXVB//dSLBZkuAtMAwiQtvu666NnwEAduyWmUe+eNCUG7xerUn1ezFNgFVncGkAAGxgjNeiOGhJA1VV5pv5YzOHfvF5cEq+jVlzIgLosB3qdfiuNe/GHr41f+cvR9hovp0OmMduvo7WKTelmFn1mpDwBLt9ycEeJ8w9zDCZ1PML4hRbubuI/wQI4/FQtk1VQyTVuq7rX/3VX/31r37997/5+33fj8cHZt5ydelEymYoIpWg9YaZQ2eCt/ErtByVJ3uWSJEYEVQERPO2ZxOrsqTgcn1U2Hnnc0E9HA7LmozJUWe2Wj6/vgh6up4CBeCg7NExU/ChaewxN+0h0xgTEyOQR4jMgJkA6Onp9Pj4CExWBAy0VgMLIeY+h9BLP7mRJiBf5/tN1T66Ho9jO+3W/2mizHrMvW29V770LejTsGZHFHpQ5kolb60OuY58GBQ+RyjMruJOVUMDW28RsqtDqFM+f/IMHagYelIL1X9kpnVcU6v0u98MYxwnzEbU/C5EhFOKb35T/zzqqGePbDiKzA6FrdoztDB5gK5eO+DLEGvqoAwppaYp+qr6xQNTRMSq4nAvIuo620tDx+vNrNUeugNK+bblnMezAgBe07GKPcIAAJ42bNWXJoAWooPLYOuanuzgeZlGO/5MKNh7AEb8bJTgj+zzrFPHoao9Bt2IL8ZogiNc125qYPHarD+JrWuNaOtfcmzuPkRIAMhMRclrsYGJAoD253P9TYrkM6qlmgIiL9/+8ePzy+X9jx60nh3/mcGhU1HBkGhMvDAzQABTASSrAJG5jXtkuulGU9VmSwGAGVRT1RgWo1qBxsL1pAEKWCl62etl23NRNUxpfXj8cHp4YOa4LsvxeDgcL5fL58+f91KNoxpWM6j48Xkr+j0zHw4HrYImjK19NjGvmyowIroTIFLNJKTXlNL5fCYiEUMmNdvzBoQhLfu+n8/nyEiAKVDmTAhkkFJQBECKTBgYiTVDKQVQ0KphA3kXkaomBmpUAcSw9mUhMwLLKABgYDf6qhM5WZu/CTeOoVuFiGRMHJgTU4xEio5QjKaBCUBNK/NiZohX6SAiBVrNvfPPnRDxqAr07ginOu1Ihi1v34PLRYuqmrklZEQU+uCHhojsPmG/uP6AU3jH3T90jL/Op90pgPHluO84396YDv6B3twTEVtjan+Fzv7Dym+CnomZWfpyDTdj/NOZtLkgITKaVUUqey2vr68pYAxA0CoSZ9vaxY7UqlYDcZFqZpgSELnMFBGKIa4LA27bpqqcCAFFdV1XjhgXTaVetgx4Oe9Zaw1IyIEMBKmU0ubSGtRSUoA1xcMhrksMrivAZQmAQ/UCc6gcOWrw0I8CCGhMIS2x1rrvl4u3Kw/9ChYgtoHtISBizvu+74h4uVxeX18fH9KRCFVzlT0XAfCQMsXkM/g8vqZVUggBCfVm34cOG0bkTADQumsBbrnLzGp1ee5o1UboPUpmaHdnqqrqtf8NXMVqVQUplZLH2UxNW8TtlhSdbIb6qEUpNLvNHXLwwgpAL57pGqFFT0Zo5o6G3eefia1JFbq6ysMUHtp6rNiVX8i6heLX7/wF0Dpchk+Caqa12nU2NDSF3p/qZl9ah1q7YdM4rZC2mxoeVhsmBSL+t//dn39++ZRz/t3vfrdvNZp37oOZVVMrcr7sz59ft20jhhhTUdtKNapVzZgpJaxmXInY1M573S6fPr+GQ1pSCEwUCYmCIuw5v5zPey0hLsu6PLx/vyzLsi7EjAzMAQ2QWhVAqVrUtr1spbiXUktR05AiQQIVRKyStw15Wd/aKn4sy3I8HmOM1dTUEBFELLZqoDu6NTNmHnaDTqH6O3ENXR5Bd8lgbHH31uZ9mY1au3Ms30TlXDjMQ2Xbfbscb8nxKQFiZibq5tb8UmSAo3P06pLdxCP8w5yRAL4ykZn1yo8rrAVcAxlfyHLMRQTj/PkBhmUMkx08+OVOGpuZwy/dLRFMfYnzT+7ea1zfeg8hkuFYwfZGDFKcO9Z1LVlKKe/fv//v//IvXz+/nvcMDU9RhlBTn8BpYhZjAiCk4Fi87kOrqjXn1uiUjVNaUooxESCYaKm17FbFDCpWaz45pcMCsDzGdDidKPDLdnm97BfRgpbVFAGJOUQICX34OKp5mYMLE8WKagZiqmCRXUxhK8ZHUwEBS4eVmUEVmSEQdCCTsQU4iu8AHVhh3sd5+2BiIhgRCoNx/rUDFoCQAIGb0LdejwWtKmOQeqvpUK0tc4jX6/f/63s3HmnmvjmlbGZVS+cg7w6ETkXUOn5tXBrAjLj3MFgTBNe3a9t/pfYRcbg5YcTmbh9ypt7ZdRrcNygW+pSj8XauuVTVC/i193C6KTiT/axurrTRL+XSRXv5pIiE4S9NPH/rRE26fCgPVz9DC7bv7ToHYzxT41EGMxxR2HEpEaEgiMiAVUutxq0B97qUpRRRCCFAx1FtP/QmkL3kWkop2DFpEL3rvR+3niQiwhTkHj4hdsEwv+ys2sfKMHMI1qal94wcITFHZq61Wp/Pg9waOnsdETowjAXzfD0ihhBDXERLqbCuh/P5ebtk5ggVVFWKutWo5tgJbd2qCTfU3h4grzWlEJndjB4F92NzB5k6aAOy9il75kVEOZdt28BCVdn3/XLZRJQ5nB4ORHw6nR4eHhXBQw4InOKKFF5ezg+P5GDyyFRrveRsWYoRqKGaaQGpCBA5vF6qhIUAQ2BiKOWy7edaCwOq1YeHh6fH94fjaduzwi4Kl21zyPgl8hLTYYm11hgYQ6CYQFVUAZGQmY2Zi9SFGEBFGkyOz2zfRQtAFa3aM+GmCEpzyAMMZ+HV5SCAsudbWrehS5MmLJkwhRgDoDeGkrUZCQTUB+6pthk7TpY5i2DTbWIG02QUVTUacHYOyWieJJ8kUuM+PzojaDOsKZBpbcXeOJCfZmEEXzoGedzJ9FHJ88/85It/ajEIVeoOg9YrOk77+WwA3YoOItI+ZJynMsIwAMRdrk0dLO156PpPEalGAqb9GRF9JCogUq310/NnJH3/cGICVaWA7IPX7Vr7UU1Bt
QKQmrFFiyLycj67bD4eklljI0djQiS/BxFFYgUsVZkjYhkBGlAjwMjBxzgj2tPDkQOcDuthTSEiozhemRL0eA6ZKSA6+N7r6yvFwEaWDREPh0UsAllteb/m2zC3GBbHsCxL6FgFzFzK/vz86dtv6fHd03o47QAVKa6rxHiRqhxEBAIDQBzT2bUihrtNHB9m4hlfegGzApoI1CoSa60VHM17apid4nXNwvBWwF7RgNg7p12/Apihy/yeDIfxW5fSJleI2qFHoQdcXSmMe+Gw5idq1I74Pz8b9q4Bl+3uno3vvWt/eA5DGc+fZ/pE0lERMBYTp5DtWMmxXKPLYz7TzBBvxqP3Z+IeSyEAbTLGY+utIyPM9z0cDv/m3/wPRPT/+n/+z7/9+99Vc4BWqqhoVErZzufLnkVlDUuKq1KoClarmQOyoRFTTAHJRGvOuVbZiwpkzgCQHNgcrIjtpQgYM3FaYlo5MBCKqYo1Q6dUEXNE7r3KtpdLztWHXZkRYSBc15VQI3u52Q4hDgHCzAOcYzmsDw8PDw8PnDrYW4wAkCW7FTALvbG80N0SxGtp2djrq1XXUXaxT5u0Uag5+2m3nsn4PPrxQGRAQtitxJ7FLLXyOz8Hbyi5H8OgHFezifAmYmun3RldM0fPpOhqDoG0r4y7lNZjGXf3HbcbzzY/pz/UfIL02WDwRhnNimym2HGm9cDNOHPs0durATQtbrMOAiDCy+Xi/QIAwMyXy05E//2/+lf/+IdvP7+eVaAUzwdwrdUQa2tJIFUFjI1wWr8DkgtwU6mmWvIupkpqGOKSwpoWXg8gh/2yIflsdEgpeYGbqq4hGEKWWqRe8la2C6SAMRgahoQUkYKYl/iBofO4iYD3EBqIS0QAEkBCxzW0URzRlrRWiAFcfsZYamk9hAYDdcWJYJQ0ExGQW16ektHGL9bbVKwP7uyybYKwAOg8h4Ms8Sq4rhm5iYoGTcLEHYwE3v81CXA3eVu7rzVwELOeh+qEQd3BRDUOAayPQJxod5b/1L1H66Asfv7sLs10OEju7pnH9yIypkPPvDbES0cz+UIlaotFwk2oEW51B9w2qxMR9njrvu/te7xBYQiq6i372OFTx6JNa8fdEQrNuTI0uqYvzZuADUIIIleAZhsOocdZJ+aMkV2Gch/+M/MzNH3ZHwOMiMLUH+j62OzGHPS+QW8+WJbFXVkvo/PTvPFmLK4ZuLIUkZyz1uIFId6N7ZRlvWtl7DH1ih3rlaJVjQko3JTUMsX2di18ambennnTVEqB47Lue7nk/fT4eM5y2XMpJSGrllH60nJqyABtwiFdTRBQtSKaUmB2h7o1TyIoUbyj0cakRKqdQQBUddvL+bKbllrr5XJxPPT1cHx4eEgphZAent77dXLOIoJMx+MxrQ9E9HreSikhxVIK0kZEl9czIoJDF1dhxIoZEfm0ihTddoNayl72i2omotPxENJ6enpkDoj8+fPn8+v26dOnd++WC6JIJKJatIaKYNyHelURZMIYMTApB0kQdhVQKUhtTqSq1qoaTMHhZ7q1BMh4owzsthgG0RDBu7UREdUAsXQLwEAQgwc7YghYa5vQBVpr9UrlyJQvxUQZVdHlUqPWWisRIodePcpeql1vI6/QLRu8PYjQriWU11g2IoKBiFgr2m+kNsXVvuwQzopz/uc0wPRezX/xOv2WnZe7MPUcHfbCJO1JziFtZ5vJhi0OqpFtOlwKMbNKnXiqx7MQfE6Udeu/1ipSWrdhk11aVHItW94/f64IsgYOvHgVvqN4AUCIxA3kYCGILkARfRqe7fsOAACUIm3b5pNsTLTmgkgxxvxcgAIQF9Fa+wRUMxYtSEICnq8xIQMk+/DhEUyPp3WJgckAFdSRL9ncrQLxTigXQbXW42ENIez7Xms1hBSTmRHCtm05Z3PKCeg1CiNQ6oo2RnZP5g/f/lOu5fhUIcR4eliWQ01pq1JKEcKYKHBMKQUiUJNcYAkzfd6R053mczmDE+UM2uC7gwIim4FMNO5+vCGoKI8Rnv1GzAERc93tVtFitzirtfIYxauNSEQhJAdRa0qXGIxAb5/fevVpr1Wb3xER13Wd32j8la699DYzgk3e4HD/RIT4PtMyq7N53fy/o1fCr0PMTthmLdHR16BJDGtl5A25pwe0cV1XH0U1vBEiIqPL5eXnP/95rfVv/tNv/vCP/1RKLaXkXFOIIial7jmbWQgprctyWO1woD6DR9z7Z0rLQUUIG6QjigFBEau1vuzPhoDAGFiJOcaQlhgXYDJiQ8JAAQHJpFQzFNFaa6m65/183p7P51wKABwOh8CEEZcUD+tyiIH9V1Pk19cw52xme8lfffWVT/hoTpeq1UqxGRU09VojInHH40b07Dyomgh2R3Emdevd+9gpxybLBKaM1ni2O6cRuy0RU8DJXbzjrPZ5ek5or6KD3ca7D3ZQVZp0gN2YdldW7XTbZL7HTeQ274fdfhWRrsxGEabd3Xf+4VsTuZuIHTWk179QH8dCdF1e7MVZPt7gjt+xWzjS53zOEZlxvk4lo1Xy24v4W2/bdnh4ALH9sjNFB4r/xc9//s033/zDP/zu08fn1n9IpGbBA/QqJOLNBSEEr1hDszmdMrhMVS+XS8a9LhEPdlhTDCE+PCAoem4wJSLKZS85l8tly7un8tbjSfdtNyWDtB4wBo7JCKt3yKuYWUztTQfUKoepateoRzeuEShv3nFMFwoBQrCSPYXejB9PF4mq+ny8vilvxvCMBR/EWaUGAOtSzrq3Y7W6z2yDTfxD9+tm8jYzDlMJ9MxQ4m1u5HzqL++OHNk1QjGsDp7Rg0c/PPQcTk+KjhtJNzOIiPgauQMw0w4QQtjnIIOXmnVmuQEJG1/O0p74hgjvSAUne0lESik+gntwEFObQDi+HDlA/6Gfrx2kE9HcexoaBKndyN20YHaJLAGhVggQo8YEgZM9VzXMCCkhVqlmiLQYkCGtx5OI7PtmaMAIjERYioYYUkr7bmYMGgnATBCt7LXsBIZkCcysAVhboKhmda8ABJZIgSF47bOaRg6BOF/OZnZcVlUV2Zk5BUY1FAGrAUwoklniFZgYGA1TWGqtpIRCFIjA/TFAxD0Xo1KtAFtwqaGiCNkMu1YlQgBSFDMDRlGyKjHyYVnU6r7vVgubWlEFDMaJk9SKGMiSFrKKVhCFAYkwMEd3UxUBEFM6EFJRIMXIqwLE7bmcv9tNOYV/vHz71bvlsn1i+zHIZgGey+cXveAai+xY9hMd99pjYECgrZJI0I6HRRVqUWIWAx9hF+IyJCIRhBDIoFbNVfJ2BgreJKxac6lb3jfZwWH9bQ+R3r9/fHp6cqDUwxoRckoxS7W6p6OSWoJQX85SDWIpIHu+QKkRTASlFjUoVS+5qELgSIHN0ORbM/HCMwRjxBDXEGhZD4fDcQmR0N49pkAHk5f99SIlCiqylSKfzucs9Xg8ChHWmpZoSHvdUbPLtWUhFqugYVmysTA9n881hnRgA841G1vEcBbZlRQomy2a2qxtQkMVUJ9Oo1bVkAAJMBEGJA4EAKsaqJgq
iiYtB7AT08oYI/v8CkIgBogoihthDFjB1LQQM0AECIGXlFAyYyYSqdlAlW0Xo8MqFyEzUGGUBWrUnHMt+xbDKoBZQtGoEkCqFQkKidat5FKyMdABi9RaRNkCppbhNHBciUoVAPi+Nvaqru5MHJgsjLtjtk5mEQYAFawakHkXO5lUARFTI0wpsamAcQwYHOBRzAxQ0xK2nIlIRJZlUVUpGSkwo3EQJDUTRCbatk9EBUxAoBTbshyWWFWKZmRTkaoKgIEPidcU11I/WZvF5VYaG3KVanRUgn/8fH5WrWs42FIqlVLe6XMIJCK6l3BIAUMGc9dKFM4ipGEJayAFq2sKuOy7iQEW1L1KyVoEVHC7VKkIgBwDYZsCDACgl5CiByeXw1prVtWH0wFDOK7r6XgkJDIKyMEAKkjAkncvyC8l7/uOAZdlefrwPufMjE/vjq+vr2AlxRhDOp7WbUvbtolUJmIEBmQtB1twz5AihnAGLSHoT75e352+f7XfIx7S6d3TezmsFzWqvKS1bhdGCWVHo1pzQRIOwolQh/rxuQx4W1Hirki39nSxihxUFOOiwFkjhBOGgmS11rqfySSwGahRQOKouUA9BHi9nIEWKRI4IBEQihRVI/Yx9AQGKBA5oGAIIRKR7poLVSKxYAyGvJ4YQ9lKrqgGj0/vXz8utXw0egE6c7C82/LwpEWzfKJQGBFwQVjMSKkalZgWG727oh4lQqKiBXqp89D9iAgQtA2KazmT5m80R3EYB55tIg/QqqqKQ0UiMQd2bOqbuO+UITEwUMFugYMZjJ6xzrmtxQUJexWrNz+7kWdFBQOLqtSMiMimIGJ1Ob37w3efTk9f/+xf/Nn/9u9/VYv5aKhioABVNYsA0XI4pOOJUiIUJhIArWqgqAhIQFKLIqI59JdodqPEQNPXjEAEzLYEPBzj0+l4PMX3x4hoPhNGRHKRWqsZfvcsqrpt5+eXT5fLq5mGSJFtRQpIh3h4Oi3H45HdchIFKEXFDJEIKFW0TaECPp0eJS70cMqIZrIsS62VlwSSXUGWqYOOmE3lasBZ8y1VFZXGXuCU5UiLhxjEzIgRvaujFrFrOGDYfGpK7KD16vuJiMRIHK6Wbt9N8c5nvnaVS6uDIEQEEQL0YRjXB2Ke7WBENGqmNoMLIunzBh2RoiCTijJzTJEt7Puea2Fm5BiYtRpaZiRTERUMaWBDtihbo3DumSTtpv5wAB3gRKsYqo/jAkBDiCF43bIBoH/uJD+6dfxSTBREtnklxx6NGP3YqTmjiJOP3aXT7AleFV+Wz3Gl7fJClogXVWIiEXm97P/23/7bnPO/+3f/jlg5JB9giCYqVVuw115rjS3xYJEZ0ZCE2AJrtb2cL98WYOaIgRTSljbVdYuMHJlD4BhjMNS651pEipguyBZS0fKybZw4nNYjY1GJiRRU6sUMTJUUU3dpRKSqIhmHQMGAUEEVPUKsouiABcSBAHULQRPTUq1gBMUMW2VDKxooAiKoVsliVX2cqUXkABw9ntI8cGITAyMcGb/umZBV7z8HZkBP5hRVjYeDQzXYqBNBMLMYotUyyjqwIvkFFW/a/HxQEBEEv4b4aBS3S8hFoM/wHB6SO3VIVxxRAAeU8A4LBABGn10uvVAxIAEiMUEIYGbTVGSnnAHo0trH9JrZG6cBeKrsWtnhLhkRWa/5JyT0Hh8yRJxrW5gRQAHUs83uufk7eUMHTC0/MGUF0YVXK167Vi7EuAwQGjMjbOi+qnbVItpgUYoDPM7aZXi61JsUa71iAamqT0X0iv+rDOrMScDMxBTIANrkUytlYybkK/yJqhKaAVjPQZmZ5OxwcK7SeqoEJne6h5263nUYcRg5wxvvGbiFVNvkDJq6jLg13qhWkZYAv3abFPMag56q9viZY5BaJQueP0fpPWB9Vf2pSmmPEdmXvoE7E4ZAkSkTMigU0ddL+fhyfiI1k9dSX2uJi7cgghm2pUYspRBCjDFG9qZFVVVUpIa2PCSxk4XzHRGnFEII5/MFzLxLTtU8/LDve9n24/F4fDgxBp90SQSHHgautZZa5mjf6XTa9x0LhWBApVYpJVcFkeJXJgNDa+l5hEvOiMjQ8q7HNa1pSYEfjiszXy4XH0D/9PREREtYPn4+55xFyhPisixuGQMcmb1YwESFiKp5mxBUyVUUIlUE7dFHRAwcYrSKaopcKokMZdZ4aZrOMHbZOpowEQUOjFSlgICDUhFeE/QOnAFgnnshDIj5Spxwk3ZrtAGAHQZGVUVMnJn7VNPaBW5KCeA8FN4ge0DMOVepqmogZtjieKohWEMGRvUYX/vdfylD+F/88p8/rip24kr/RlVNqjfGMbNjHcoMZXwfSLvZjrfKG69NXHbV8U0ltdOYKLSQcS9VwBaNVoBa68fvP1exw+HAkVJK75KPNDBtIT9XVWBmgBaQYozLsjCKVAOAEJIqqJiIlSKvr9t2qbnox48vtTjiK4cQkiOREhHlVBIirsdDSmuMsdZKHJe0ImLJVQl9yJ45SBK2CGWtdd/3UkpMjGYxhjUmiuRFSmZGhGZE4LeLANGBlEkMzIooleKleFIiiqTD4d2yxlOotTLHZVmoJ95JKcSovRyjGVj8BXr4L1JIX8Kb4h9D8GG4NuKpYCqC9kPk2X84ohVtHLZnP2gkiccJLrQNZFjkvoCIB+jUqL02pO7VB9KqGoDDToEXPM2K9o5/m2CdctojejKYdBw6qPM2c4K90Zf5ar++vem4Nb3paRzaef7m7udDI99dbfqnjVuHEAKnb7758U9+8uPf/Obvct5E7OHw6NlFREwpnU6nw+EAAHvOzD4MrdcQAjqDXxeqdTybIEFYIjORIuhhjY9Ph9MhUQOqaB27Oec9Vx/H/HyBy+VyyZdctn2/oOmHD49fffXVw8PRVJjZ6+sCUa1VSKxW15XE7Jh6I1+UUkqx+x5qoAJ4BbG4iXzdAnr5CW57SHe0aGCQmtlU1tSWdKw/fGHZv0jbMGUOZ/oZKmYcNz+cnhNus3/zlae7X3nkzrS9o0z/1UhTY4/y+PVLKVfD6YrKziI3KNY3ohuI6Mo483Ld6bVZyGsv2/b04J2OuCNmnIJT2O29ccF5iXgag3G7Fd4i2BQY9Hz+uq5m8rOf/fRf/It/8Zvf/F0VYwo55+SlFjaQFHE8cPAUDl1RJWOMCQmAvFoFajGzgpV8DmHlpBI1Alk1VcemYQYEUiNTZTAAZGLGLeeOR+FJvCbQtIjfi9ht1Kbxu4cMMGSRqRGVUs7nrZRCC6qpmjqRR09dmYGHSADFq4s9F2cCYEjE3Q5pcEpGN6m2wVZm0GuCmj9TKxACEVID3lAXvlOJZtsvN8OYoefbzQxVwQBECumQwyICLbFGRDRGFF59TjPi+87e8ZqNVod/6mpCempx6JQegBhkNsS1qhKF+flnNrTxmtN/75jUrwId7utODtyLKYA5ndlZo+kI/8bRd2eWISIinjWIXcupJAyuFropuRl9nWNpqA9dGEJh3ollWdTqCIQPp0hVPdXKzGTX/c45pxSZEAmtT4xo2m5ksQG8+cf
6XGN/JEVQBL4OYkWjK1sPBQBTYyQMnQ2svRqNiBjBrC09NuF7DXqZWYzBjSQ0cJOIiNTQtY6OKdt0ZYBupF6L+5kxxjiqHWb5yxyZI4kAsQns1b7/fPnd7z/Sj05Z5DXnXe0Uw8qLpy7NjWmiUooapJQoBuiFHdWUoc1q9+J+tQqoyG1FBIxBiSgs0RRq1VxyrpL3csmXUnYMjIGXZUkpxbh4Ubu7nR7BYMAYQqQmZ1kMiYirKqAj2ZgVtZyzeZwUkYGZmQgN4UirAzEjwLKkp9PD6bgGwhQoRU7M6MhOgHWt+XQMW8k5b68bIh4OByPwiWGlFAzYNCghAIjVNvQMwUUCqNNtYLbadDoH0BBCUi2irR4SVQEIWqMIIiIZt0YbA1AfY+Fz4AjMyBQFEdytnWvfPa7gwqILhRs8XgFTNxEm1PirsBBv/Bsw+t5DGBwSCq5llVf72qC1RQWEKsAGpBpsuIJ3RwdfenPcqcwvfp6PL2YO2xM5H02kDs1SBFXVCckJur9xjbjfKPsbkYpX9+/6qP0bt8XdVrYpJnhTL9ScT0IzU0DmAESXc97++JHT8/F4fPfunQUEIJ9oUKu4tOPRmNiexEIgwhiZCNlAiSDFmMlyrh8/frps9Y/fnkMAREbgGKMJuRe0rMicETHuZS8CADlviPh4OmgVQvMkuZYqpQLArruI7PmSS2Hm4/F4ejjknN+9e1rXlGLigI/HQyml1AwAYsl6oz4iIiiREXiwGsTAZ+vZZV9CiOthOR70cgFvLSEfGQzAFICqWTUVqXstRSqxGo72Jd9qArgjj3uqUFWa6tVmNTmLQRERA0BNKX6Zqm4VqnVP0z1DuNVKbnJgSzu00IQHLokIiZdliTES9UwFaAgBSBCto240M2c2iGdoHKdDekNpo8TaeX9QTTea7701b7y/e1NthaNXohusgZN7MC/gP3PMBsowKUZouX3TO0kCmntSf/Inf/KLX/zi7//+tzlnRFKriA4EC8wUAptpzvn18kpEQAEA3KnV7gK1BxM1VCIiphCoFlySl33D4bCcTmsgy3l7eckisu/7tm2lKgCIWK11LyHvl3y5bPliZg+Py1dfffXTb34SAuWyS5+yNWijtJJYRENVBQGs1QjB5HBclmVhAhVQrYiodjP+cVxk9u3HsvtpNtHtD8nMgVQx78Lbk3GKJkz64iZk8Pb6d9eZfzIuMjs8888H2dAEXNFslR4RMLsJgogId0CLcalh2Iyfz3zdnwdn4dBc6CnuYD52iMIX33cY3HcREGa6u5ffrjfEMk5Gs/Ve36E1Bi+MHb9/bKZWfUAALgxIyYAI1nX95S9/+enT88ePn//w+++QmKhZWWM9q2qtSCQcMLTmPQAgBA6caOFes21mbMTE0Yiq6KaV0UQwg3qxkutQ1opMFsgADbWAalVDKFIBwAjZAhEZqICaStl3CiGOrntAaHM1midg6rhS4v5VKeVv//ZvhfRnv/wZByp7sYiEbIQmBqAIBkyOJqNavY3wdjHBXWgA8Ax5pzkCAIrkYQDwhg7nGiITAQNUHXAI1D3DdsUmYDv1Wtc8V5kIDqs1zC1sizuUwj3lA4DeYyI074BDc+QmpAif13oNr8wkOrM/Tj1Z8AZ3apDcUE93onh4KGNVh+ukrSiXWv3xZCONZZl/C5Mo8H/mfC2xhm6j3hmBflm3xMKtEm0T/xxrVRzWoucW/K08UT4c5XEPERloMeN7cidgyiWKCIDB3MrVNZ/f2kV8DAszo0Gt7f1FHbbeAw3GzNoGPFx3vUmNHocYQlyYsCVOFAMNh9BJR6p5nwkjAV69QeqzqhBRFQGAMBgZYUAHg7rd8nl9x047vRLRqP29eoxkABA0gKEqgBAQV9FP5/q3v/9+iWkJ5Zyrki4PjLwwmCmGtCA7hg17Wg8AAjrozqRpCEG8adX87t7/6jLLTGKMtUjV/bJdtr2YYQjh4eEhrYtP2MHAHgP2LmXm1IgshABhtF5YFaRAtF0uFyI6Ho9A+HK+hNgQOptMbOOjkdbTukTn4TXGp8fT4XAgMDQ5HpYlRJVayl5LReB1XZ/ewev5+XJ5/fT8eVmWZY0xxr1kALCCAGAgDAYADjYQA9GIQilVU1E1orznKuAQJO7amZkg+IicRofY8n6E3vgEZEpt3Hwb/ktEoBYCGgKbY9U2wRqAAIwZRcFnwYkIQAscdJZDERT29RsRM3AfEG4dLZxqk4gI+pTOIfLQRwaBkQGaAjalCGYwGnYBFMQMrLVJwT9/zJT8/8cxxEIXr1e7BxGJGRGwXkdOwcQ+s9DEll20mbmw4/QQ0Ri8O6TNbCrBJFh7rLNfHBAMzQwIiaNxVgQVPedCly0fYoTAQGZSpGKzzjUy5mpVJedca104pMAxsQvYGIIKnkn2rTw/v55fRQRi5MAJkVNalmUxAxNDDg5CsJfz5bwrmNRsZs+HVUsltDUtBJq33aow86U8lwL7DiHC118fHk6nFMO+bVuMKbJoCRgPh2OIDBdj5mpJtOZSulWHGEIKUauPpyQkBOJS1Uo1rkYqpui5aryC7RK1GQUeLiciZEIgGaV0wDiFQvoe3RPPuOC8xaoa19gmXzdtamr4zxOnzUenU5d1I/QxmCUEEjFUGUUlrt1qrWnBdfWwFPlwEQYE3KHFINrlB5DvkOez4esTa+cH6+94w7/zcbc+7ZsxOWa82cT4MKn26RbXpqCZ7G2y2sdvb9bs+j244mhy8saecC2/PTwev/nJj5clSi4xJlNblrD4sB8199y2bcu1IKIhMgeOgVCth58BkEwVFAB6yzOZCTHHGGMAYt32V8n7tp9Nqkc5SxEKfFhPy0I5V1ENxxOYqOzH4+EX/+Ln33zzdUrBQFS8a5sc98bjth5YIApiClVN2KB4EOp0OEYmUAMvwwRQ0TFgejbaACDECFMg/062XBd82prpD9cvsf8Tu+g1a3PBryaEwUhZkINbgKm1ymPo17l7BucW73V8a37M2zoTwIgy2G1AAack5+wQzoQ322zDNJrXzVrHzQ0Fzo8B0OT2+BV0n3zcaC74xJ7lm/5pXexcnx8RO9yuhwWHUgCRbD17P3PNLc9eH5iAwAiRzZxO1FVnLhJCev/+/V/8xV/84fffvTzvL8/nFBfJZwBs7XA95khECVIBMUPGVlvn210jtlxYMKBgMRiiERdRRctaQEFBG9QmwrlKSokTI5OYyJ4VVMBiSojo80ja/BlDIChVHNamzTUm9HxMX0ZWs2rq8+J9Gf76V//pj58+hjX9+GffFDVQXAKrGHhrtQFoVfAmdk//t6grdqte+wwGxOsEb9fgRtZV+SB+BDNkBuhk04U2MUufD3mN/FqDRsUB/eVZSgAA4E7G7AkTj9l4ZruP+xjrDwA+6nyi7R6vH121t3J+2GNzYGKmnzv+Uikzn3a+uKK2jl/xNG5+pkCVa5nAEOlEjptzA6pkdg3Pzhw0c+4cQB9+ptm1Ox0nVBQRDaPIUzvCsyojAjN78FIdQqYRAdQ6xgCOmE0Dm/HBmn5XmTO/XpgBan2SKSKmlLxu1a
884rtmXdu5427RTMTQVK3FBAypY/tWAr2KKhed0rEZva3C/TpArRM8AHmsk4iu80YCI/kgFjMjDCFQCEFv5SBhAPC3MICWmpiamK9DVCYqGc6AjB1Vq2YO14no1WGkMR6M7WXff/9xi/HjGnWXc1rs+KSnlcwM1OISVQ1IU0q11lKyqsISE5E7zNJRnRyRJ/SpksAKHZJRFb0QyGfdiAjFcFqPKaVdBBrsRIxrZGjYP1fSBIRW1dYobF1X1+VVgZmLVK2fvTjESInI4VKaWOe0LMsSAjOvSzwej0sMjMa8xkBo5giZyLSu67JGIaZIl8vl+fn1+0/fPcjDuix/NDkej8nbh82Hm1v14YrFihmzVKMimnMppVSIfioieUdY5IAGGbND6jialllD8fYQBCMxEQOyhwtFzcBBQQkRgNnQK9kVYC9ZjUJgAKBQQiBTHNOfXVACQMd4ADAyLzx1Op/ggO3WL/LAuSfYbWRCsLn3iGRWRUoppapVUd9AQuxQme4Hvs0W3hx3Mm7+8p//4d0x8WITr/4h5xzNG3eRSsuizNJq/ATavLjaDfQbg3sqUip2PfStQPS18vq3jtRPyMQUMLCJVrFcFZApRAPIAs/nfX/AZbHIwaz7Bu1igGiIZiognvanSCyKAGxGtbqvKGDELIcFAwcAkFKFGBdMMRKgBjSzGD06VgiJ46qqTdsiq4JUy9lxHShxPB34cDg8PT395Cc//vDhQ87b9x+/q1ZVq1ZUJjBlpEBMCNUAHI/Ta06uth2KWLGcQmAKcV1TOsa0FCYOAZGBWpagqlopnKKZAXHA4PUCzKwDrdUGBseNtTc28yozHQsagQEEzMOw6il5gAEHbeYGQfihiMWdInR08PFlq/CcKBg9MoJsZsVBEzCEFPd9D1FTCtAbOUSEEasImJjbC8ZgAuRc024x84KZT7mwORfdSfdqdHqW0om8l4fcc9M1N3XT+HFtnbpbhGHazkkP6I7rfPIX/zm+HECOc3TSzKQKgORc1/X4ox+9f3p62l437z9ZlmWNKUY+v7yWsnsJrvpUJAPEjG0ofAQmMwvoXQYZzFSgF5VE0S2XYoa5qEoxKR6aOx6P6/oIAMvheDqdVODl5QV12/d9DXx4/+Hrb77+k5/8eD2kbdscjXnUK3r5MSIjBYd3NoTaMuXGYDHGdU0AgCYMAOogsQodbnFs7hySuydmRJ0yxsPBszHGALGrEpt/+8VNvDthyM270AD8gBxu3xCB6h3xwC3a5yQnDRFGlT53EEEzG4Nq7m49fzm+d5vKOi9Pl4ee1Bnu2Y3b5gsM2EYoMbPU0Qd0VXzDcp1+1Q55g3rtPxwZUb3tDhzjnYZh7Ywjt/PKcWSJxcuAcQohmhmKFCIypa+//uZ/+p/+7aeP53//7/8PREb0yuFGNmTBezzN0EOHKUR3nlVB1YyDgSiZiJrWWhBEVSTGSErmPS+mjgklZovUKCVIRAYzq1aBAJgoRa8x0j5521EMQ0hefD4O7qDKwwgBNATwmlIE/vTp2+9fPp4+vKN1efrwLsUl150BvSMGUEWrqjbUdR+e3KirUQeAdbmtTeQTAPXizObFXYMHAK3E9EquTrG9jattencIAeaWl+nLBsxmzpVEhGrqWN/dCBn00+ikBVyu5OP/52tIvaoWe30fYyOYVhM7ATjNFIjXl7t3yQbjjCzReCrr5twIwbTrKIbE44d9J+9zjzCJJpr61KaH0TYU4Oq6O0HfoN1QB5VhB2obu+L9kY5F08I/0GPqoG5dMCeXBe7fzx16Lp0HY7e/Unur6TQKAQGBuZXbeQ9brRUh82mB1pcoXhftnoUieG+8ogEChWYX5txW3FldHENQREyR+FpCYK1ox09j5kjsOL/jHAT0NKTLlMZp146Oa/zMJQD2fAWLMOPQqWMjO4f48N/inlWbbdl30d1XULcJkSjsJi/F/vPvvl8XU3s9PvDTh/zuaGwgKmgdnTUhM5d6JXoXYqogIP0ZPLlVB25qG1TIIOJ9kkYBF4wUU4iEZPuWD8u6ruthOYYQTDGEkGIadKmqpibWBqFUscMhxBiPx2NYKgID4b5fQghV1cxL3pmI3C4P6cDMgdiz0IHQra4lLWaqoiFSigcA3bZt37IhcAyP796JmZS9SE0Y95KjRDRqcAwCiAaERFhqqWoQTBhEpNRaVI2B3DwHQsBFsTIjYsxBarXZ2vOxb2ZWxRgIicF724HQCMyDdy6JyP1BADNUsVawgMCFiBZEDCFEQWXfIwBHGQX2hk8EBfDpUICIDFhcn0GrhrfeW3jXO3GjmxGgR1Wtz8kwUDMeI+xctAMAoL2xSO+Pt9bA/6nD3OsehkUPa5VSSCWGzmtTcfWI/o5duLPJhnRTVZ8xwsw4JfMRcX6v8fCzPG12hrbIaYwRclHAYhiJDNnxeM57PhwOh0RAwVBpzMjxygXEGCMHdOMWAEoRHwW978UU379/fzw8idj333/yKcAiXlMApeScMwgjYloiBt4BArr3m1WE3OVUM4AlHWKMp8Ph8fHrx8fHp6eH4/F4Oh3WQ6q1nB6OLy+fY2TiNkqum2gipYJaA8Yw2/e9qmClNSUpomp7ldUgxCWmlWLcSjZCYq6qYqZmQFhUSlYDRIYQr3AmjvPxpUpRGOLxjnLmvXDu8HRNKaWUUqt185RhGij8ZizosC/1LgXnfALWOv+0w6mJoIiIigjUWksFsDjoIcboIUJVrVbB56rd6nhrmC0TYd9SFCLejSvA3k8ySI6meUV3nNuUPF9zMjgdQ4PYrS/alMh4yOmCb3bk+lTzpfyjT7IZ9jd2v1fE7WlDtA8fPvzkp9/80+9/v20bI6nWENbjcQWTy+UiaiFSqaBiPvnJ+3pSWt0KdzvfVFVrQFL/J+4VEdEIbYnheFofDh9C4FrKV199dTqdVCyEEJf1fD7nnE8HK/vlkNJXX331zU++TimUUhihVCFsysUnVfQhYgRIBj62RHq5UHg8nk6HA2hVMCYyFURF0zr10c1uttyU/raVnNNW9CYChejeYEu4DZofuzMMIXuTdbzubynwlsy+lNHyw8f8Ntk4HXdm6/g5UVCtnRLccndMDhzRjVEBCIAjoXdHUTT1E8JVYqPZXeSRRkjF/91f/Ir+PxbWJoMbJ6MWu9nqIujtyoDPkZ6O8dcYw2QHX/+q10jZze6ouq3sn6uqGgIQcQwKtu1b4tOPf/zNN9/89Ne//o1K31AFd4qasWlqZlpNQMgaNoU/RjGDViZuIlo0g6iIRcQQSMCqiIIQMjH6rhTJtNcGYMUQKaYYtNUyqGeSXZJwwEgLEblxRcTkEVhyYMUKACagqj7JKURCxHU9/uP3f/wP/+FXx6d3//Lx6XhaXl5eDykpI+NoVBEwZAKwqqajfNP6XHXvEXHiUZ+z5eWO3nw2koRtUw08Cezzgc2oS+0Yo3VJhYPgdSLjKSKAiEo2OjaZ2dyLyWVZljs2bBSI0NzUHzh+SJD63+bTZskwyHXWCDZVJo8s3ECLmSmQOkQL9DANIRtek2ojDtKdr37fL1hK7fOItI7w0
1tRNqsAf57QUzfNlOz+a7cs/SXJeozhCyVtzqVDwdzptlkm2lUwXbUUInrbFUAHzOn8Yx42p+vKAgFaL0VmHwTvAEOARDFGNBWRXIujKQ4BLKY9F9wWTlXJpx2Z91QoI3jctz25YtVKvWEaAMzQw5BEE4HeUNt1cRHR8YLmnSDqJfsefmA2KeRQNwBVBYGQuCBvl+cDQlXIQc65ZrMFERFzrQOiGgACRyQww2qK6n0TWj2X4qoLq1tL2LeguS0MXrgbQuBDjHExIBFZ1nR8OC5p9apHMEPD1uVlV0TblndSU1VHNI0xxhgFDOAhEG95b20qTITBDdaUUkwHbdPA0MwkFzDjGFULAHAwogCgtWiVvOeLUhCw0+MDEP7x2z9kqY9ExJxriZaA0QAB1YNUhEBMAIrsfRHklcFghMCIxsRkaNEiciCQEkqpCODd0ABgDRoLjCggMQCBmSghojH12K83lTWfzcx/5uJeQT23DECRGNHYSzlb6JQMSQEEzJHoGvYvAZExkUl3oq4sfS1XwMniHGw9ODEgALBaVb1z/L4MjPFfc/zQ+fYDIhVvbdDxmZnBh/PiVLoA1x6eQMRc8Y2ZNW43Imn45eL7G2F9zT3aTXhMVcnlNhFRQA7EphgFTCAg0ctlPz3Y0YAQDMGaRauISMwtwOJ7wYGZFYSgQeMcjsu7d++IuBY4nf5IRm6H1KL7vl8uuwlCYFUl8FSlrcuSUtj3nVz2iphZCrSu6+l0Oizrhw+HdV2XJTIzE6LBuq7rmo6HWGs2BCI0NQRIHHIeAhxHVl8MxFQMKUSsqgaipEhiUHLda04pGUCupUkDYkXbcwak4KVTvv4itSre9ib1BefZjJv/4tpk1gLMHIKJXNqVu50A971PM8jHzUAYNzuGCzouAt7y4mEUaw7hSDN4HQozE8mIp4YQVMADDV4eQEgIDIaGBliHvgCANs38DWVat4lnQ9yNpGGmxxg7f1z5AnsO/E616+0g43lJhxy40Ts94/fFY2bGfrVZcRliywa40kcMaQmI9uHDu1/84ue//o+/+lyeEbnWXCUv6RCYU0qXy2utddtgw63Weql121QUDgcBUC/KDYxmhub9qV6vD+u6GoiU+u7p4Zd/+qfv3783ra+vr1999dWyHF5fX2sRb9EXLYn58XhMKXzzzY8PxzXnjVSWZZFLBQD0mTMIAFDFw9NOQug4N9ZAYenp6elwOPi7E6OIgnUnph9z7/ckWK67bN7A/6XUmTO1/8Buj3kjhhwbkQK8zQFiT1sNvrvbu7Fl7cvb3OA47Y5+7mTyXX6SiKrKsBHfvjW+iWjINN553Ho8yLw+7iWOR+rmKVsffzVupLfvAm/aq8aNZs/cJjdyPvyapej82G9X4E5kkS7AXltjAABkzYMAUCEA+v7T59/99g/fffddDElQ1KJqUVGHPKRWGUiqZtpEny+U1zLnfCFiRFRCQgID4wCou1SlaKhFRVEjByBSUBUTNQIJFIiAGTkE9wq6HqGYiDn0MCsiomMKNitIrwpXq6jXXnJUFRAys7gupZR/+N0//ejvfvfhxz/lsHJI6pvlXTXoWAlqBjMQRls3bMhYfdlHYNrNGAaPnvQNarrbl1cNTEd4cSbyTlIw793MDu3kKgSAfYCEB8NxCsrALRfDPTHooIeZ48YDWJe3Qzq7qw1wG48eN7rl0/G99irouzcFIER2Yh8vxcyllNtVvSHg8SXeWmjObvMthtE4YiJ+E+sTjKSVbbZbhzl2Mj7fle1gb9OHFkR03LmrBBGRlJJIQwYDV7ROAVJm1lVVACtFOJgDeTATIscYa1VCRjUGBGruvrYySI0QfIEUBZyI/Jn7o7rLAaZOstd05Rjl5928qrVKrdUAScFBU0INQuL5HmbmntYQAUI2stvV8OkfaIraC20J1UwA2ngrAwFoWcRBZNiBmD1C4BPPVAqSMRugiBQAYiQLAQ9HjVZLLQgFWZCQjYmqCRIpQClCBIGYuNccqyLZGLCuAEwUOBlIm55s4G2HHhYtpexlY47ruh4fHgEg53wMy7IsKpIvGUgZg1WpuaSUjByBqmgrWwIiShy87tQ5qpQCZqeHwwkODiSILWyK7hAShX0XM0uRVTUrMOG6hm07e7drqbn9kPHh6eGlWiklLmmxWmq9bOV4XFNK1SQuSyA2AJ8uiIZIdAis1nItYlLEUUdrRbQ2NBWCD5QBlLicOWOLIl/77NlDz0QMDezYWqacgPx9mtknBh6/8t4FVV9YScm5gwGa4WKt+N45k8G7vf1WZO4QEhFAi5yJNnhJ5hBjNNtmgeVaEc10wvVlJgNGn5jifHwb07rt8/8vHPNv/+sPl4lvj5QSSFUVR25Ub7eoAscTdGN6GGRvn8EmhxDeaA4iUmnf39we0EF350SNh8Aul01MkQmYFElEC2gAft3qXkpWc1AzaoJGQ4hIJFpFJOe8JIYYQgjr2hxMVU1pOR5Ppri9Xn76zY/2veScTaAWjQGXxGZHANr3PYTgdUTv3j0+Hk973lJKNe+XyyWXLYX49PT08HBMKQUyIgpMMXJYWmG7Gazrej5XRASCfd99QXLO3EWQr5bTEBC/XLYQEhAbsiIYUAUsRTjFZVkQeNt2EcHADs9GRNDaOtqh5pUW9+mCt0Ay/hffNwAYcU64qhJqNaJ9xqaYgYgiHr7gcF69Jr+Xw+ciopd/m7p9N9mI1NVNRzM2w2xN5Y9CGPBIXDURRRMEdSHh2FsGBnC1pO02Q9gwz7q7OFR+CG39AWCk6MeO2G3zPBFpnw8Jtw7DAMq+YwRPCOPkt8zWzNvjalrd9BMqd2B07C2XTZuLMlMIUaoej8dvvvnmdDpdLpc1JQBQ1Zh4iYd1TTFyznlZ2MGo91r2fccu6h8eHpaUYvQJqdC8QYDTu8PT04OJvr48v3///i/+mz97PD18/vz5kJb3T+/M7PWzXi4XZq551yrB6Efvnh4ej4fDUqWYVkYKRImDD14Djw4gmVlRYUDpoXxDQCTmQDGsp6O38SMCEFltUQof2OAiyFMHKioivcgcrDehqBqQId8s9VhQUJPZTp12Z+zC4IL5T3cHxzi493pxgJjSiEwNkkBsGTm8bXOdjysXIcIt6JF/HlacTUbwkMNDuYygpE4phfknfjfmqzSwSWGN3sJxEBHoFQ3epjw5Ed05nNzj8qMgYtxXOz7NrG7Gu5cyTHYcL2V2Ba2526+uYAxRDRVdRxOVakQU4uG7P/7hf/lf/r//+W/+FggP6VAcBbpWQmIKiEBUtMfrvTNpRPoIsQGuE5ohiCqgK8QiFQgNsXjaUYUdB0vbDFuH9kAyJgI1YkIxRAhM67LGGM1AVSt2UDUgMDJFx3p0eA5/MPeyGC2b8FprrTnXWuXTp5d/+O3vl9Pp53/yTdleK1aTyoQjUCAKiCMRDVMmeTRCX4t9nOZ8mouYjt+Bp5WwYYroAOwgBg6mAtYm2sNNDmlOq5AjiYKvJ/kwiYYk00IMei3Eg6ky32P3eDUzcJDBHce1395mrRujMVut88lXKtIbBpz5bibjwbD7Xiai
CcOkyUmTJz9BnGQd1j4vnS1lutM5DdXT2aqL1lsB3lejt+9slH9ZvvilpK6eOPPyaieZ6HYTgd55TH+8PDl199c3d/f3V79fXLb4fN+K/+9Beb/dXDw4MDx+Pxan89N21aI2IQplZdtRJRzuNS6nZ3X7VDzCzw20SCpVvt7kHkq7YlopQGU5TSmFPKSc1SSpvNximLGnFq5sLZqPaWBQErQs88GL/MrNY6nYob1VpLQRsVsFqrqteqpj6OKUnmLAQpJWS+0pjGcpoIwiLuMU5DwzAA4TfP/HsrJrznNo+LvnFzT9Nhux3HIbWmrbmjqaKUmnOu/TkxBBLNK0/pqk6TOjmlVy/f/t0//uqX//ir716+/f0fvqE0StoYUak6hTlRbb2u0cu9tVY3SizMVOfJVAmwMieRYchMqLWt8YFczJ+oBgSu5wa+ND8BELurt6bbMd1cXW+2AwCFXV3tjvf3ifzp01tv+v7tu82YSi05j2GygoBnHEdcdInXgDh01aJXsBBOpBjtVtUs49Jc7OjQ/lUJKcXTMM9zqT4MaRxHomTe4puLCC/V4pQlWnAp5XiT1hrBq9arq50I74Zc3F+9P/zmn//xf/hP/+mTjz/6+uuvtyLMcnNz9fb9PXM6TbOTEQmzOJGqlVLiDK6+fg254uEr/cZSxY6nMXMkIbTQH3BHhlvr+8uiQGnm7prHjTVt2gBrs81a5pq2m0GNk7RpbveHJkyJJU42++lv//6319fX19fX+/3+yc3VixcfPX/25PMf/SiRZ6GUEsGqFW3GzOZDTEzEy0Okl6MGlZJqbc3cPWiFzGxqXbFpTZNsoam7HBteA1xfiN9S6ny5iAHjmKcFmBjprF5gaTtPlZK4E0seN3sH5WHjlFrV2lCrlYfpVGY3EpH373ZmVo1V3WmTtwMRgay5GplkBquHTggDQpk3tdY46UR0dE/MItIpTVXBtM5fsbPn8OBlifhp8bmGHq82XLCkkqdxtycopc3zjz+1cnr35js4V+dMGIbR4HOdnExyMrPtJhOREJIIDSnesJQiQuwXdRDm1lzVhmEwi/n0MKzRamYi4swE8bBcKZlC2adylDGxZJaRkIUS5Q3LwDk/++gTR/riD9+8fv/w0YvP/+wvNi8+++z05uuUUvOWhXfbfWtlmqYIPDgNmRlgVY0yQc6ZqTua5Uaf+/8XGh603+9D2Gm765rMAOCrZMLKknBZhyIAiV3rZG0OMQxd5pMhMfzZGZLXRy4yDNmdmlvOkmRrOVHaBXPw3aHImzIM02673e1222FzuzfV6iDJaaDRvZrPpgqQMEESALQwiUZESR7ldWtNaq3R22V2xQxfXO6iGdahE/A1D8QZF7po2OIsTrja4dVorCkfgJTyBwYHS8Z4LsqsaCzmlZX9cubIlwZjvDMv3LlmljSGCMEGmJMt1WbrLTioqoNySk7MralbBphTUN41r5H3kywTGk5OuiArHs0VxA/9AOSITy/UbwIUZAZ3xoBz2ckBxGQwEalbrTWYbVtrN0+f1VpLqUUbKBh+xR3Tw0E4x70RokVnzgTe2mme5/cPh+GbRObTfJymU5YUyAohxDBxQGfv25vFr2gp1TrtS559QkABF9sAgCAnrd6MqHGxaAdbp7tUpTmV2lqBWWwGMxMHJZrn9tW3r4d3rz/76Y/+7V/++e2zK974l1//7rOffHJ4/y/E9uyj26fPrmt98NzMmzkhNOWYTdKQxQHXpg6tDpBqFINJrTqMBcypm3k3bVh2MPXBAoCcmVgD3QZHiFK4N/XabBySMRReQ75i2Xy52FoSjiparbWUFC0ONm+wpWre3F1L3HEGEzPHQKKZldpOcz2d6lyaqgXZT6JUm9dmpk1Yx8zDkPM4cpJh2MjYhCn5kCEhBa6qhPN8jl809yMEuXws56o7hsgEmBKzE/VsMOYVo46LHt0uTcg+dojg9HMvQa4QhY/oSLRmIuIENbh7c5dgfkvBDWNm1ro2VbcXQbS7nF5bw6OuotGJClThzY3dxoU54/KiuJMifQjb8/Mo18XDnAnamrB9+uLq/u3DH37725tf/Ol8eLjabL0e54LDab6/ewsLAhnfbDa2ZN1na+j4AJ6KCzxhp/wm5j6rEJSLYU9XrUXrAlCrfVsmlMxs5UJsrRmTE8xhjmKYVZGSe2vArFpbq6YWul0s7+7eB8fPp589+fjjj69ub5h5v9+ZWVKtdVY3wDl16rDLdTMnd1I3kqw25TFfPXn6/vX3d8fj1X5XHTnaGRdewRdC/77gMZO/vO0a6RKR0NpENSYGM8E6VL43kBbKO+ZEocEmAMipQz6it9bTUBiZhtZxElcO91K1eaubq+v9brjaDrsx6dQ+ef7ixz/9yYtPP+WU1b2a1jLPVeeqD6fJhIYh3Z9O/tt/GcZtrX59fV2KHY/HNA6AuXctXQeInUnMPly6eDCvPJvWJe8AMKkuIkMk5h7eh4BqCpA2V3VDUzViNHNIlHKWaqiv79R7OOM4ClBrPRwOp9Nps4E75tlbs5QGES9zVfXWmrAv54tWx+ne1j4I9YDe1FQWUo31mbgo8a47fE0I3es8WykUUNAQnk5pFJFaazNnYBg2RDRN5Xg8jcOOaHM4Tl/84bd/89d//3f/+Kvvvn9TqnHawFJt0f32WqkqmzFTW5LS5WNhIA/miWD2oj7V6lgyvf7d7PyF+w+9VKd+rnwTC3Li7bi52m2HMUngLd3KNJNrljSO4/uHd+GI83LHI6q4/Di66JitgFJWZcfSeY2PPpfYl2BXyDxA8TyuwQ2FiwmfXmt1aB/yJFPtzZwhUOXE7hLvTHAmbHebxGLwanWT6GozsNfM/tOffP71V39oZZY8bDfjpy8++uq77+8fGiRgsx0Qvqz5H5nnudzzq9NZH5ENAoCHSqp133ERGPVVInbAUSFOQaMFam6lqc/NwSMl9gHRUlY0IyE8PByBA9G78BG73eb509unT67+u1/8/EeffvSTH318ezUKA0zuUKtJRjo/euLNKQaAe5djnQkCHkXDlw96nPeuB4S8hbsnV3JxizHRJUiFLARQAUn1kw00brf7PZE4MfFgTSuP9w/HqbRatamVpnOr2pyZ1WL0w53MQ4PaHW4xQOHu4C6qxJxEHMFjaT1KCWc65hyw3vjlmsGSo7kpXEHOAWJ3AoM8FAnQTT06xCNSHQe505A2+6v97bO5TMd7l2FTyqkejykF8JWZ4G5aO1BlTS16FcZWqZtHKHTAEgtYAON+g/pqM8jZqXNdkAhvNpssY3M0Y8WQ846HLaeRJLVqv/niy3dv716+fff2/vC7r1/dTe1fT+XFbSTuFGF5oIXD864bYM0f4OemWY/igBhuIKIIXXq4vcTPPe7qfbPOKwP4OI5mWJlImSTkNjqtMlyhRrF7ACZxFkjm0d2NIhogAqEZpV4hJE/uZmruGMabVYCuGd1P9DBXovZs/36zGa72GyJULVorSxpyJjd1bcXcnRxEwsLifKkQFvs8HNCqsthDMjPrclwcoc6lcQinslqPSxNB/EjtY83lpkUObX1V3
yrc+S/1zNvsvshIfHAY/fFjvZv+eORhPQi11lS0BZZd3at1Thk1MAsJw6p2QfsEgjVHcx9BwjAyL+Yk1GNWZmbJMFOHuyu8mefUK90LYKAHzZ7Y451DA1p0GWXvDfrV9QYCyju+1lqLaTQ2s3me3759N9UyT9VAIgJOAJkHC1Ptx6bbYHV3pmnt9gLGfYFURNA1Is/sT6p6WqBui/mLds5cStCRL5iAuAHUxGJzNDciassBMFWfilJrqs2tcYeL2G5I5VAVMII2/PwXP7/96Mk333/1+U8+52zPnl9vdxiA3T5td6x27L7lXJtE6NW4wz2RupMbSBKNlN29qAsF1wY5UTO1Ro627I+oUoSLAovwBdqnhqxIbcyltSGwoESEhWeCyDdG0cVi5tSYiCp0KnPOmQjUZfUsskF3KyFotiAu0pABtGaH01yrTnNtTeFMKeAQSRKZNbUa9c5xzGkzJJaqbsox7QkS9RgCtJAb6Rtdm2kNK7MwYi3rBkTNki80eS9P7Aeu3btoNYHIO8k3HK4GqJsrqzf1tfjqYIs7wUROZKYdEk7MLJ65637AFjWFwGBYc2u61O/7gW+tdV5ZZiOofxB59Dml5c9+pj7w5etL3D1GxYFOJ0EOZhEoqWrDPB/qfPA6FXLmBrC14qpC0FbmhnE/am2X6Z+vM9cXJunyr9EfyCRJwnuJnucQYtLLUzd/rd89MoYAUHjQWDkJM5s7IJREJBuLw6p6MW+q07wUikzVowZPtdbtdvzs889/9KMfbfeb3spgqrUxI4+DzbNIpFi6ujcHGZw8uAE5ZeYkKQ1PPsr39+/fHQ/b/c5Us3B3Tb7AS+KSFlPN3MWjlo0n7u5QYE3OjTpBjwcwnkNzIvwFQ7AKscbp86at3zsiBLm29RSUBWCHRPzBZqatNrctdLdJV/t8vd8c6v3zj26fP3+22W23+11Vm0sjEk7ihFIVU1F4a+Wb774lyHZ38/TpU5H8+tW743xytzA7RCGHuEJ5g4k+xtY6ZlgC2hkWGAiWVVroy1dTE4yR6LqF1kkOiSymfxaONV+GwYIjPH7TmpoZp8RdkNOY0zBIupAMhdMyu0JLibpDqeNQtNYYimW4Fx+47W4z/vjevjwF+6vd4lyiyVKLNoJ4LeO4HYdcSnuYKhGzjJvddir5zZs3v/yHf/qv/8df/erXXxxPZdhcba/2p7mqUS1WzZupG6mLw7siO1ZEGdwjCXTuK/xBsH7+5uvz10tYEXFYAh0WYXikFle7TWJymMNSlvk0MdHt7c1ut3v59e+3AjUM6dGyrLGRu/syxXf5oWfWH4QmR9xkk5ywaL4L3PvE1DlEBigcUEo9AiMK9x4izhZbyNG7FhFsADF/S8JQK8xiqkPKQ6aHu9cP79785PMf/TehVmdmHofh6dPb9/eH+/tDWToTxBf9PXe6RFgs67Z+zyVQPi81EXeJEcKFvbTEojGtuZJV9HfrED4WuEPNjsWkVnVv5uYyGIe6GpMTUR6uSinzNNd6cveUHl69fb/bbn79xe9+9uPP/t2//cW//sXPXjy/GXMwBbg8zld5SdptISQEWWxdkIFMePAupL6kCmeY3GOir7gfXdOo3/TIBtWQ8miKOU49hFLKOSfJOT1lTsNuN1edTrWq3R8m4jrPtaqVZuGd3EcXV6Bh7hB55qgcmoZedFUN4iliHpidxJlGt/6dwwFF3sJCArJFjHQJX882ljmYZs4xNxHBxdGW+05Ln9CDzJmd0mZ79ez5cZ5Oc5nci1pycCA03MldyAHp6ayte0YSsZGtJyXsEjkLJXcnJhECmLy3rYg95JEpJeGM2EtEwzA0UwfLOI6b27y9Ns6nqR4P5fXr1y9fvn737l11lGrvD9+7pNLqf/4Pf55zTmloWiLMSCkZdXqD5RvGRAOB3MyZOYqzl5nMevwjssUSMtWiF5FJGOGI5GMwrcJjeKQbKJZMJu6uttSaOQsJGZMNUHGzqCWqkzsn8j6SQElA7qzOBmoWdNMEgkWBXs3MvE38MI/vT9td3o4p5TG5Qd21EMBOvCD7TV29JTpztFxa/hUYsu78WJDQ2lnBI+vTVr6mS/sAgHAWmmem8IBEqFUj2cSi9rnOQ12+/LKXuBofunhcTr/jsVP74Pfxc3KQU1DhxoFQEBmBU8pZrIG4AiBhNVT1TGjmVU2oC/WsfC2BQXZid1WPrOECBdtlSjW8Aq2biGhhvPW1JbI8eheLQi10MUNEtN/vr66uSinfvHpT5jaVagpnhtdm6k5D3sR5XgKJDknVNvV6urvDpLe801x0HccUgTCbwYxoMzrQbO10m9NljTDYVpf9TgTKzmQQgy0ezgGilI3YI+PmBCZ3VqqG1KwNGxl3WTZ0++TJcTrcne4+/cnzzZaN5tvnI8+NfHKfd5u9EPHSPWB3wIU8LVSuFNmgM8ONgnA1AlBGBNaLmVNvTIldATa4W9/ZofkbJzaK6M0NijpNmSVYZx2BjXFm9tY748yuwtSJsIymRhwTz9HjXSAiknMWd5/n5u65OYCpzKfTDGd3MCWXbpFVlfII87ghDMpJcsrMfDqdjtN8VJma1qpLT+kMbrnss18ekvXn+EHAzt3DxaX1cS9f5+8fc3MvaLXloMYQcJ+TYJZYtHPbrLtQBN+dE6WlTW8mBr0oTwDOtU6qusjAdE7UeZ7HcSSioNXtqWWYg3ON1qI8wUy+0OU/6tg5u6uTM4LiwqOUF9eWE6PR8eH0s58++cWPP7veDpubfZ1O12kIBl0ze3sob+5P96cyaPWLfqC7s/PFuPyHE4z9+5lp0MZJOuerRCSMyL+YFOZmYMKZfGaF5BGYwCm+MjM7EUtm0eZ4/3CopUzTVIs26sNSBhLC048/+uSTTz755JPrm73V1tpRRE6n0zzP2+0YlGjxZVprQ87L/iCm5DFnT6REw25Ppttxe/302TcPD++naZB0NQxRs3VGTPqZGbTB+3h4sJj09yNaKiMuIO+z7D0UTNTZNViCdNFjYbGUKrqjMmutpb7TIEQOXwXM46CpmxM7yANe6Cjl6KZD5ie32/t5un16s9lvwL7b7e7u7t6/u7++vt1d36RxU+uc4HMpImJOtVbHcbPZP3/+fLvdv3zzXSktRL3UzC0q3CYSJyWiX+qSAES6DEYuOVjsBwOw4h4jSVtPQReYhQTBHHeV9GDfsfMR5r4agKtqKS1LSynt9/vr6+v37+bNZhjHMeexzDXULJIMsvC+0zoKErVbN1clcuaL+IZ4heWsRuOHxgQXIJx5ngGkxCFuVkooTSR1zNXUqkMobee5fvf1999++/3vvnr36uWbP/zhy2++e1mK5vGK0lgaHNJU56a1xVp1378OHy3L5QAUXavx8nv2bSabCxOgjw+m+uPil7vnxHDNwjmxCMENrkJCRBUuQi8+/YSES6mb3Ya7aNIZN4WLN+wmbVmZcLubFHoYcF85lUlYck6ui/BGsMuwcMqO6n2IuheSYpNzZI1LXZJoiURb40RB4t8Jx4lFpJRZyPNul8xZOMFevf7+26+/+PRf/fsnNzcvX79m
2qgqkTx5enM4Te/e32lPkrt3UDd3Hyh/sBlWU/xHoy51gz7yOIs16yS8K6N6DLYxrBdOeuNUzUxJMJfWrFUbhiFLkqWtRPtBSVyyg82sEtnkx3IYH/Du/f394cHdN5u/+OjZNbElGSIsPn9m0MoZVKu725mFzdBbe4/u6dqR+OBi16w4kO0aryQiFzg5eG7kTqBsxMzCnKqzWiq+sWqnh/Zwfzid5mZ+Os4QGfKmKlSdKGTHOk9a0/fUD3+oW8PczDuNPgVaLTImIghabiTMzD39IJfEzNxU+ZzDBHFXZ+m7cFgrFPYcSHxw/GubicjNVBuzyPZqc32bT8eH+7fwNGRJmxE6a51YOGUWFr8ohFFH1vSeChFFjhqhY3TC40MZFuqOLOgOFCJDHoedOkppBjAnli1JItnwsJuUDw+nN2/v7+/v3759O8+zg4V5s+EEmg4Pv/n1r370bOvuL168iDlGZuaUyLs68FqtiL+Ymfra5esbKDLtc0AChPC3iKz59pK38FIU4ePxaGZhAmzBkLvTKCOTW9/4xszCKXEmYaJkRq15rWstDGmQJORLtwYAhFPABGCBDwAzC+eciAh6d5rnu9Ocj/NuO1xth+0mbUYSFiZlUrgbGgI0EaRSwIqOXtM/kbwUFGLXSWizLw4RWGmuYs8sO2c9O9QhoGd8xLqkQMdByELTtT5nTSB/uBXXffvD3/zwsZ7i9aLiQ5MzccpRBhdZWklgA5kTWEgysYMTuYIFBAOpqkU+yuLLQALABgp6KyEzgyHiiaj6BgbahJBzBgyPk+8l6HnkV9YLZmYJcUIzh3GSnHMasrx/4OpJ0Njh3CwidmorMevCPSDERDT5LBcY3wjfQx4MHN+KwOIcip5Offink9MRszxmCurSF+e0gYkQsEPQ+Q6dbwAkKOYNxpSnuaVxQ+IufPP0dnO93V5vnn12q2g/+8VPvvv2D88/ujm9eY9WicvVTfaTkiuHoNGCZU+J57maGdwJ7ARxNpiripAZLcRBS5WSl5Zxv0pv1jx8KjUmcqPgWe5O3ajVpqzUiJeBsSillBokfk4S0m1sZrXFsCKcLKTqz8m56OAMYK5mZslIVUspqkYE4UxMCRJwc8A9EdwHSYO0yNHGLC4yz/NxnqaW5ubFWtAppi7+1TeQYCFKu0zgFjsX4bm6myqsgyEAmLfonrFDu7dr3PXWJU6l+8JThVBWjx5QIna4mro2NwWBGlnE9NFaJoeZqyrnDDbY2pZksKVlOjye3iHBdq6SLDiTD496P0eE4JWhdRj64nx1c0NnDpKIPQLq6VU3Y/7044/+w1/8d599/JTLJNp03j5oGzfbK6Nh3JyUvvr+TfntH5r7ZUJ4/gJn0/eh9bmMhHzxM0QEIeHkTNF8tyjzApwZRBZD2sKmfcjYe+2J3NDMKQnlYVZ7fzi2Wstcg180RogN/unHL37yk8+f3j4xs+PxGBt1u9nY0dFmZ3J2hau2hnaqpxwifrEqAcwBGeCm2+1uerg34qcff/z67Zu7w3G73VZhDvoKWhqaWO42IQaDEIycQTy4tKMJ1Jk1FgRgWCphcAwfLku6/ulqGk1jNU4ZAAMx16RQAInZOS2qXdE/JIYQvGlprYLa9c3uI8fTZzf76932apdz/v71l3fvH4bt7ubmJg/DaZrcaa41o48QTNMEyDjsb29vd9fD6TS/e/fu7u7udDopPOQfqKNGhUSpy0WAmVqLpIMCa8VO0beM/dyn0KHqaFHCao6lKNNPK1M0A43giCGu7hQcrqrh6WstZJZSGscxJoIitqPOdNI1IZnZz+MM60wdi4gTBRql8/a6Pa5s9DuyNlXWX37wr+5uhlrVzGpVJqE8kDrL4E3uHk4vX33z2999+ct/+uff/+7Ll++OrVmzsB/bqTlqATdwqtqqNjV1IoFEUBKWXC/z50iNvReloozqvanK+Qff8/Kvq5M9H2FXZhpz2g55yAmm0cpopimlLPTs2bO71y+bajPdjhvhEAA+Rzmrv/algr4mS0SUSGqv1TtBzY2JGV0PGbA+SYWe/OmFPblc84VCSEHGjE4+yqlDlxmA6SL65w6zJkKbIbk7BJuBydvD3Vty/clPP//+1UsiIrfS7OnN7fu7h8PxZE2DSImWlncEr2uj7DIMuNwDj08uL1bx8jkrtWC3hOsPnDj8+EouAmGHm7VJa63z0Ib9djsEybO1+0O1PmcDg7h5gXlR2m9O98d//M0fnBic/v2/+fNnT6/deetzfLRZjHsbUZ8NWS/qkmAoqr2rtJKvlamV1WlZjQVowxYexgFKLmLOblQqUt7kcZfzAHCpOs+11nonD7WomR0OJzMf8kY9seemZMrB1EBUnXum1GwmIqYENIBNI20Dg4gT9Yz0UnqkiohsWRJL6ixfItJas+DbeuRYyd2J+2w8Uxa4wRddN4dL56Q4H6TGLC6oBnXHMA7XT/e1zU11frBEvrAQEYwXOYf1pAgeCQv3PWNnGxety3WPBXNSzhJzp5ISC9w4peQk4zjS/nmp+jC196/v3z5M94fpNJdWzZw5ZXFMpRicJZc6zfP8y1/+EsDV1dXV9U5VS2vDY2yRe9fH6LuaundLKbVmUbUvpQgn6i2h8xPWbRSXcmklWmt0kVUuqhkCT7EEas2dKG4rJ2GBk6oFCOsMuBWnDrYy1WC7p5S4BswomNgd7uQwgmjeCW/I1Fq5O06H0/F6N95cbZ/d7piaeXErQbyVE6ckOlX8scflSb8sBtVa16TRLzK3lQvt8rUUARCMCUzMFJpkQdVJTAwneGCA3AE3J3qEFF2tYuB1L38TT7gce8aSicQ3uYQ2nO3zPFV1VKMGqYZmcDR3O05JKxKZuyZIMwVxHoYQsFMHwcxhQWZBrA6Cuy0TY05Oi+HuhtQia+eFp9TdzYLb6vERc11HttZraaoSREzMqh7D0AC0uTucOUGc2KsRJ2bRttSje3WDgqY6pcScliZGh7M6sXekeLSJyJGWiFq7WqUD7kydVFUXIAG7IyiwI3nQupBY9hW42AfBXBn8tl38pzFxzofpYTeOL3706Sc/+vRHn3+atqna8adXn//D32zaKZ9eF5aUROGzt6pJVDmJRCgJBiWeTyohselGIAgSVuZnQxDXRvtB2CmtJPwsFHLYREzCbhZUalEgjM1tSwVU1WoHVRsqAKTWTzvgzZ3ZDdyURTptH9Tc+0S2u2MupSrHOKprberupiaS4Mts3lKXZZKiyvCUU04kZIkpZ1FGrVrmOqnOytWUKDEcEuzMkYCqWyN0aKh0Wv7lFAHRson0LJwcR2mAiRZeMnbYwh1ORECvg3j4iT7p4ebmpqGfrnBzKxHJMbF7cxNHxJowV3do6wQVsTEWCsqUUsxzm1mtNTon4RVSSjFXzQt6eW2BEiP2LRGI2Tso71Fi1j/L2QEjrEGxGBzODtd2++zZf/4f/6cn15s63++E3r9/vx8HVJ2nk0K2m/Fqc8V58/rdw9ev3rp3tWVcRDOPPu4Hv6E14L5oXUYHq7+JsBtUVTL3OezldfEENe8dKKKirannYWM
sRnQqRaPKHW0xIkCcsN/vnz59vt2Od3d3Zp6HTq49DENrAxZRymiaRlcHUQwF9TrWEtM5wQjFfH9z+/Tjj7/98stZ7XA4BMwmsRBRixZ5rIwDOEtRE1GUHOPCmfh8eeTswfrY8VoR00bhs0/kmwesiNY+CQXTam9Tk4MZMgyty/r1BQRMCGNOR29ZoMDz57fX1/vdfthux+b2/v37w8NJ1be7/ThuwPeRw5VSirZx2Ko6l2LKRCJZrq52m83w5MnNPNfT6XQ8TtM0aXNdmg8hARrEZg51OIVvg63jodS11lu1IFvBGlYCUNVqKiF4ZabaZSzcvbmx9TTYXM1oCT56K6k1m6YppWTqpc4EFpFhYFWPCKY7ooudKSKSsiGtAlNmMFNiW4cCVq/pZ4js+U1Wx7/d7yJ6riUmq1POm5Q2Zno6+XcvX/7dL3/913/zj1/87suH40SSlFOx0loLE+pGlHjI6XScm2kUrDworaAARZ8qNHW9MxAGHYNHV8ScCCF1Y06Pas++KIX6ksysNR3qSBao6nbc7vab3XYzJDGHxihHNRG5vt6P4/jq1avStFXTTR/5w2PSgj96/JfMISbpNEDgvUosmE8nZmZKsrzYaqukaRzW7g26BUdP8cwcSuhNnuUo9W2G3vc1Z7ZQOBSYNWKF85D55nrf5qm18qd/8rNf/sM/wdqw3VupOcs4juNmaKfSdOHeZBYIEdns64pSb+XFAezIkXPYEe51sVfuWKqiC1iaHhEFx3YS6iygMI8x+wVfQNqKtuqoLAaMkhhwZ2ESIpRqqj1HIx7uj2XMm/vj/Hf/8C9A2m1v/nL3F7vdDnO9qMs9mgD0c7vArdeaQThHkHTR5FTVS1IZLC5pbiAiC9JGJkJ2Sko07HcOmU30BHVr1auJKVej2tyMaiN3iHBTg2rALM08FCYDckBqgABi3vnDo7Dghpxz5IGLf2zWk4TGnJk5XbB8mzXVKtKLLL04TlFJe7RjifNy2NUuHuvZZzGRZCA2aQ4iGvfX10Qk6f7tS53vpnnOwCanMQWfBC34BmZiWRLX1loHBBr1TAER6HOML5AxwJJoSJJSIuEYuY0VyDmnYTMMw9uK09Te3k2v3j28u59OpfPPseRgRBARuDUtbpJS+vbbb/f7/ccff7zd/lRE2hJv6DJiAya60MQj5lXBNaUUbW1tlkYSEaZzuhulbScHU3QPlqyGCE7McUZMlT2BLUlOQy5zA2CmUdlhEWJ3aG3FO7enLp8e9W8oXL1VqwoNrKE7qRWE1hcJoHA1FQOKOxFnkTRuOI2u7TjVUo/utM3YbmVMG0oMq26llJIf4XgvEryLrB5L3cTMony2NgnXgDNEgGjJEtcqf7AQX7oS702IHt1dmmsA5yGIZZfG47KLeBkifkCEtv65Xg49xvmnw+nY1KuTcVaSoJ00tAe3E2kmD1YWKVlSSmmgkFMThpmhmRlJnwsiMCQK4eIEOBNEUgrQS+zL8Em11pQfFQyYKaWUTINkY12L3tcBzGye5x4bLSNJAEqpoTtMDDNrpkRMEGd1Z4qGYeQcEaww+4XfYk7xk8G68XY2d9M+u0jW2y0KNTdoJ0Jzg6+CTjhfCAGAhTqZm1EfmWWLHmPCytIeOIE8Dg44ydX19e2zZ9U05dysXN1evX39/e56PN2lYWRual6n+RBbBv2+9pocEUkic4hTNQNc4BAZgLnFMEavMkWo6m4pc6sLrQKfyZSBqC9fkIUwx1VEohKk5EQSTf1ismgrkwJCZKAG1ha90xDzouAoNtfkrAbu6isMMiGWJIljPxCWXlCkryklhudMm8yZdTOkLKktI8tuXYxBRISQJLTg5fLcxkbqOoTL6RKRiOPhHPg3AMyLpHWS4PSjSucz1pWIZDl4uhR6OaiLzXoCEW8f/dooFFTTRCzEQVKq5hbJuvOKl8tZOOdSCl147CAzXbfr4nqXhDDS+LiHIUzPDMgaZ7ivBPe9QrUYBQHQ+Sei+c3Y5EESHx7u7t++vBrlcPf2QGBOc1XnVGmQYq0hpC+ZUqcHWPiGAx+LC8ioXySKEegvfG6ibWmww92hSxIV1lLAnERVbam7rKRBrTVw8uaz1VOZnSnlobWm6tEfV7A63BxiDsy1NFOwSE6DiDBqnUsptTVwIOyRMkeJfBiGpGxg718GBpCTOsYxl6Zp3IBJhvzxJy8Oh0OZp4f3d+Pouw1zktBC7PIJsabLFnRD0M0HdSU7lDXgw8SR+i2T+NFU6UEgtWYiCRZKIguULrN5I3NfVKT6LCg7ETu5wp2JEAM9nWl3zNKsetNnH318c3u13e8k893dw/39vSmXGLAZh5QywE6kilprTqOqNbNmRea5zadxHIdhuBluANSqh8PheJxKKdOp1Np8UVozV++FNCAABcZ0lggNzYnWrcoiTkV8DjqX6N/iHCwe4XyKfcnNzEyYGZH1hYDVOUsJ/7IOEXWPaBe+2SwR4Tyae7bn527kY4eK/z+P+/vDelpFMpEcp1Len96+f/jHX33xN3/7j//yu2/evT9O1UUGoWG2qg4Ng50TZy5zvX94WIgcQYnQia9AROa8pB8r7qGfOEOAqqlXWEiis7R+t6VEe169JaXsDXsAY8777eZqu0kpORqZs5uCWMCUXrx4YWbv7u5qKbjeOyFijjUAWiNmWYa5Vy8fa2K1mZtE+ziU2t3dvdYqIpndmMmZOs8Z1s0Qbw6FO5jhUKJoDHY+y/4EMjdyBNn0GeIVJ6q2AoNSTSldX+8Pp4e7d+8//8mPX3zy0avX71gwjuNc6/X+6nCaqj50UbvF4dJFwnC5By63x+X1AtAwgW68/GsEXhzg/4v3jKPsDa4RWBiDfSkrxYcoazM7TQ9qZQiRW085jQCpt1NVgERyIlalzTgw8XE+/f7Ll//06z+8+OTHf/rzZ2avVmUU9MmChfu3I4qNQjOi13HO35CWFnQEA8CjNQkfNDs4hObBBBFKBlHilDZzscOpnCat6kRZggZWhD24lkttytUWX8w9JDADe2IBnAiqG+8cXkFYs/SkHETKYWWtuapqMesVqPN9ok6hEfdCyck8GF2CnSXmR8x6+B4j30QU+nLrxa53eRB3aAC/yAhgysOw218TgDa9bz6rt9oTmNbpBOOtVo3QNXB37c2y9bP6MYpWoUCEEouIcBLmRJDmzkgimUXc/c3bu8Npfn8/3R1LNSfJ7lBHK7NWJfAwJIfNwf7vrVb79ttvv/zyy2fPnu1v9utH9xIM9/DQ++j1WWXBO5hXzGy1nGYWMK61cFm7utnlqem7KGZQ6TFJR6dNp+qsxMxJSNxQmrZgFSY3SQMLB+mrITNpL5B5W9SC+lQwEVEMW5IAxo5iMNPaeDsO47iBljof5jJ9//L9buTbq3R9nfcbYbFiXkvpurKPT7cvshzUOahXC8Ep9QtZD4tdoKzP3md5w9iKdLET+l/XqfnlxK0vWW9QPOKvK2p3XfzL7Xppsta31YUx+LLGkVqzpt6cnN2TE7Ez1Pw4nUhbJuQU6sDzZrPZbiVJFNsC3L+kDZLYQUlEATBRow
U8NQyDeXUmAYmIuWottdbdkFZTz8SJWL1lz80fOa24cAcNwxABRGydy3qkBagIbBrcdwZOqmHROnsRea88Gs4a38zJO/QkWJVShBfhoyii5o414kXBk4NryS5N4YW5DE2GHudGVBiQ6gC3GEN61NubyM7N7Pr6+tmz57XW3/72Nz/++cfXz/a73fYf/+EbEdrvt6erXb0/qNZaZ1k0QGNYCGpLqEBk8OA+EWfvIjjcp+MAsK+jtWYpBWdui+Czr6QZhb65h5rto36OGYgac7Q2yV1U21Q1CDXcESLLRGjGWhUcq+pOPaQz8MgppteZmd3gTJKGLGYmQRC6avUREdFms4HROPKYLVGLFpnVFVQpmSQLbzYDow2Z99sdM6tqYilSLg/w5b4KKvOcc8rj8Xicpmk5w2cSalVlrtp8PX5EpKD1GF8e1/U8Y2GSNFsnYwOaAxJinE8+L5WE5TT2PUbnfDW31pppBEyM3szsCeHFpzNHlk7Reb4cHbyMTrC0B53g5iu8M0YWVfVX//TPz5/s0GYvuh3HN69f7jfbuSnJeD+fpvd3SptaK9GZEg3nn4gWHrHLT++ud6FgTikR9aKMu5vbYtoN0YWDhvist2ZLaI4FOj+dCmcz42OZjqejEjkwdYYnGNjJ1dwggEM4EvvlXpCamiFnDkLReOtAEJVSmHk7XqlTYHI76JIJwDhuD/d31/sdkzv4+vbpk2fv3r5+U2LWzhamWLgvAeLqCSKoigu06BA6wohF44PICaRWrfXugSxmvZmKSLQfsagJiQjMYu5XRNwDV0giMteiDiUmTvG+7i7k03wUuCQC+ZMnt0+fPrm5uaY0fvPlN8fjUdJmmqbIl4iouQWHRFxIa82yIbh2BbXWeZ7dPUkex/Hm5ub6+toMh4fT6XSqtc3zfDz2FZ5rIyKCC3hh0bHoTIc9Xyx5bwHlnA0B6zKAetxBWCNyv0gSwohHQy5lWsA2vN1uHx5OzJwkM3MAOJdRZ7ZegO/WsqGFRVYLzgZfiycivXLxwVHyhUh99eWXf3ZSIqda6/t373735ZfffP3y17/9w1dfv/7m5VtV4WGfhIta0VpoHsetJArMS8dNhfUITWbAbA3Wadlnj7KROCAAems5+KIIHbn3g8cHgcL5ycButwvKeGtqbV7AL5RzZkrPnz8363ctQtjWmkheQ8PFn+owDGvXixeidiJqNseEs6TUqYNMl5bIojjlKpHYsERoDiAqCNZ1j+FQEc5DiEGIe9f72Qjpco1MzI4Q/IvsCeaSxNxZZDvyq1ev3rx586/+7BcfffTR6zfvYC4i8+FwfX1zdzwep3qaSrPVPsPdmYQePy73xroNfEm5W9eiOO+THtstGgOASZ++gVsHRro7Q2K8T+HLjC6IyLTNrbU22TDknLwlZk55DBdgUT0Hu9v9YRoYAnrz+u7v/+FXz59/fHPz5OOkREEsfi7WXBrqXplaGL/iy/TBtov2xfryNUJd7qATB8MRd+JQSur09u6+Nq4NpUGNzc21EblKBZhBrVlrLUlOMqSU5tNkydzdajNvrbOht+o7d6ja0lfpLX1VTTl05wEybSW00ChdrYc9uA9ify6/VHeD8XmjprPG92pt1ksONw2s/+oOM63mgSnrXP3O4iLb/VXyWu61Hk5mRi4sREtLY93wYQB3ux16D/+DI6MAiDn0RSVCCDJGsPIM1cxcmJM2O80zyb626f54Oh6rcyZJ5jCtrZoww73UednMVo7TZrN5+/btN99889Of/nTYDiml+D4RaFHvB9C52bFEL2aPQos1sXEsuXRs9YWkk87lYyKilJaRReaQLIo3MV4UNtgoMSViMbMmKcrGyolkIM5wr00rINmjCRnexRxwcBBVAA4rIAFpWPSUt8ychRmYSvHaMuf9Lh/uX9/Xw3xqp2N6+mSz30tOiZm8PmKnX0/xqtOwhi5husNdxgZbu3OXee+l9eYFXnHxTy3SFlCMUca6cXTb475c+po15lz354d2ZikXrvfKe1HaFhnS+PRustJ9lZwzEzVTqOZEIFTFMO5aazO8ROm0uZws1dPHYsPtbRrGqqQ19AONrI5jhlpmYavGUJ1tpqvtdqYKsnKqzMKU0IgpeaU+MiYyJrfmp9Ps00xFVbbuEv+RaIQ/qhpJR86ZSFQ1SapWHx4eimmPcMlTRhBOEsqYU2tN1ZSIcwJETc1slN6gg2Od52GCEAHNzQWIuNx9aQFGLdY9sPocY8ALzbpfjK0BCHZRsKtreD4nVTcXB0gR3PgxqQI4nB54I3Nmvwa2+OTjj3767MX/9v/+X9LN8OJfff6w8X/5L//l/S1fydZLy7XWqlcfXzGlYkYpVdLW2iYPTMxNRS2x1FpbOZHIOAwp7YJW0cwyswhzMpRG6kmyMzWDqjMSkZCTYwsXJhBKzA5EcNRaI6Y0iKraUnqF8NBpryj4duc2M3POuXkhogCkEsiAxDGO35YuvBKTEBN5KxVA1BqM1LwBGIINWN4mJq+SxuvtcMMYVcd5bqAkaVNn45SSMNo8bunmKkNIhFm9KqRbn46iJkirtZbGInCB55x2rLobNpkkWhVElIYsIja7ERiu1MzdxFMiEUlG7s6JoOQVqmrBguZO3mK+FNrQGpu21sRlyRiraiVGlpQHgp8cxkQ5JcmZqRLAItuBvYoVd60e2qPaSilTLdskKQ0pDYkmtoRKNjt2xoxg6QElGZPCqjYSVoIxkjDMyJSJmEBWATCcmNSjruHsLknevn6zH+j5fmtFi8/zfRMej6fcKr+7v1NKN88/trx9//b3o2xbhKeEiC16lkNIDocnJiN3jw4DCYQsZd6zJ6skfcLYhyFtjjrPpxgZnFs19ZxHgMtUF5bCxFlArkSnQQ4baa3WqaqqrcKsTJF+BOklu3MAjRrN1QzycDwRMRs3VeI0zQ2QnAYeevnSlAOKcjITEWaJhnIiA5qZeTneXm1b1erOMpDLePv5FldHG79/++Yw+YthEEcyT+BW2pBHIzaGE4qWWiYWBLAnOtLOzJDmfYL8OM/MAHEnBBJB1zMsOXPKkWkkODdTEm7qxMKJlWFGKkGRUg3mQsLOVIhImBqkuSDtjkanWcdn1zR+kvLNT178+PXrt+/f3bvRkDILbp7cjOOYhuE0F+bc1JmvptmYx1o1JZ/qQyxsMDeoldMUhUnebrdXN/n6ZrTQA5ymeZ5ba69eu9amqkaViMDs4gXY5O2kaLRVlrk1YlfyPKYJJzPzZFoq80CStDmYyTqvTKLE0lsmiZKlIrwRuiFoa1NtD+BDyg7P1lJOiUXBJ0ncqgzp9kRTym4oksx02mYS1mZ2KMe0BadcKpuCnYQGK40aInwoWswbGMJUCUW9tZbSkDi3ZuzMlGqze39qBlPc359+89uv/u5v//E3//K7u/fHwzSzJMmfWfKipXhzciII7bQRESXZmtl0qoEVF0nkBA1UIvsiAuQckc25b4bFhQEgioT23M2otSxPFzdikug1mzkoJXIib1prKznLZhyvr4ZxAEiNmosJAglA7LjaDH/y8Ytf/sPfltP9Z
hTKThvKvlE3R5MkIsmqt1KZXL1EB8SXAdcoh41XKVSj6jwzc5IkLKY4lolSdpKmjSnxkJ3kVFv2pi7kTSSLCIHd0LSlzIlT9iwW9OBN1BhkMsDVrLm3JJRZyL1V5O2WKcdJz5S9SZvnZ+Pt63evTqfTX/7lv/3tF1+A6jBmkVrmt9dX+fDgB1YmJlAtDc55HJ3bkkj0AHHhufG1BrQGZMzsXi9DSSxIipR6gRuduHRRIUu5E5TBGCAIAe6mDQwwJeLBYKp2nCi1lHMGfFADKDFqrWU+hEkprVXi7bhRtV9/+c3Tf/rio89+/uRnW21zHsRwOpb7/Wa72WyOd8ectixXpGhqxJ4TAU2tMklr1YlJWK3XRs1BHdHT9VJlAbWkdONInPYsV5PK4eRT8QaSISM7slmea5lNa/COtBMPg7h7SmxG5hXMsxZLFmC1hkbc2dpqRc4hUt7Mu9iVd0p5atWL1pQSgVUZ2LCwm5OTkIBgrrXVWrW16MgxUee4zOMI5mBmaQ5nAZG51maqDlDsTDOwsKNzveScyYdWW2uNSdK4ycxsc211O+zvp0LjFanNcz3YDB6Z3HHMOSdmM3VXchhra81bS4k5kVJFlD3IjGxrFMKeBhcGgTLTbjtqPQ1uzM4yVhoqjSXn2oAhW6lXdNve3bXWRMr7+wMz02BO5MamuTRtLTqLm7f3h9vb29999c3ul//0f3vx6bjZHk+njz56Nk0TO7xVbbbZbIgcpMMoxOOSUUfly1oz1UacEKPvdhbkdPdEA0KoQJjBwcIYQ/KhnCwiiYOc1t2Nm7fW4D7mIVPmxqYOynBnT4OAg2NMhYBBtjD1qsw8cLD1kbtXtZQyM6lq1caMlMTcT2V6smFiqVNt6sM40Haodb4rJV09cdX76Xi8b5Vvbn07DmlMeY/f1VpBNgwD4LXOAPIgHrk/LOQxALiQMUYTTsmYo2NkC/HMdrv1Tv3gzNSHV0nUukDFZY8hcrzQE3H36ClFdidkHUQds/G1qwhGayclAdgWkp7LKttKWRSnZk1Wl5qyd7nOzXaIClzwVAaJTRaBmRBxH3AFAG+t1HqgFqm8iBAnJmeGMymIe53Vo6BhjnmeMfYB0yVPjeUQIjJ3axphTBQCc+b5B2jasEG90s9O1AKVtxbt1vT3osoFtbNq8GqR158/yPj/Tx7rOyxFMP8/ecllgr5SGsRvVomzeKyt+e3++qTzyefdbqeq2/3+u9cvq9V/92/+/Tsc37x9ezydUkokqK2V2sBbs6hoOdzZkTjgWOQu5kgJQLbqtTVVzZtdfLdejCcOfAKYA69IZxSWAUg5ZgV7Azb6h6uf64t2AcEYx9H9TCyfXUI7ruNIOWYnnBcHmczXfhr5urzR62whtC0iaZFIcjXJQ1rUBTmzC891btbSIAOJElGSMbGI1qrX+w0RBQApeCJEsrunlLR5NNZpmdUmonG7iaO4IrnNjKT3/VeJRVqQ4rK0vv3igbUp5EYUMk4LkFWkfxwxRc4NMrPNMLgH5MpaqVU4pZSIy7KwfPGgZeY6pTRIWkfhg9eHOQkHmXt8XBqE1UosIAV7r0Xr1RAvWTf/Upw307lUVb26uvryi2/fvPxmtx1+8vlnPGy/fP3l6zfv02a/ub4Fyel0MhbOY0hwXDTxeogEnH91ee7cXdXPJ6hDctiIVbWqpehQL+2pgHwRJ2JSr+porc0nW6uqWIJhZlqHiAA3iz4biHA4HN6+ffvjH//4eHxwcZg9eXJTSnnz+mjWcs5Juu5Ip76+QG4AUQjylW4uSh+06DiLyH53VQ6n0/H+1Zs3tzk/u9oNOVmnIrAWCgqwlFKMUQRNAi/S22q1TU5EOWcivuywxteIOl9TpQ4+D2SpjXkAGSdZfifLkzUICtFJcZyF3Hjcbu9ev5uLPfnoxX6/b60dj1MQbI7jdhzHIW+IWxqG+Jx1b1/eLCIqJeyYrjuTSIjs4eFBRJLkmIPNOV9dXQF49vxHx+Px4f798Xic59kuBtlX02oe1Xxl5jSItgUQ7toHipeWS8DYJGrdHaG0HEE6b7bwxAQ4lFRba6Yh9Ky+spkHUZlIksRm2ihsgDnIs0RxwWizG1trtRYnij5M89a0jeOGBQRxMJMDPJX2/v3db7778rvvvvv9777+5pvvvvv+3f39CZTyuHWDkbk3i37u6hqCdRNQs6bN3JyJOYE5YHGrhYnKIz/G713+eblz/Adu1N3g3DyIgnDxJh0xkXMax3FMWUSE2EWcjEN9xc2Mnj9/bmaHw4Gs4+5U9VJ3CQuZBMuH2Nqw9gYPaxymwJfWkzZfTKX4UrSmZRRnefMQG/SUWIYclA/M3Gl1NeYtHS3GKKlT2CZJ3Bmbl1WiQFN3o8r5/fv3L3706c3Nzd3hwd2HYWjqA/NmsxmG01S0i3FdQNougwq/mPC5XPO1q/bBDYpnhmfxi3ZQ37fLuWPyBbQC78PqK7LprMI1TdNqvlYUjGq9/FB3mk7lyy+//uu//ts/ffIXt7dXMhBMr/I1SI/HY1ybo7oFVKdD5ACs42RECwXKRQuCmWNzlBWQlTZztXmqzQ9Fh7mRIYO51ig/NnMl4pD3NH0E9QJWBPGF5bnYz0QUznq1xkQUyEM+Qxkj5mYsUB1a4vI1pFknjz44MvG0pdnVf7OarCCwcHciW9+h1przsNls3GhuGhMf47B9+f3XXuftkIfrqzI/1MkAZmHxnCglsMEcQmQOScKbYQS5N7XG7sog5gxCSs4pufeiLZOHzNtmdwVhUNaYj2UhpRgriJBM5BB4uhiMTymbWfD3rA0rIhqGIczy999//8UXX/zrf/2vr6+vj8fJXaP5jkVDmIiSnJlLcOEovfepbN3e6+H1BZ24Pn+9L+vxWTWi/aK9drENzjfoMpboh25JpS6/ki3CD+vWWm/9irRq2rwSND7d4+BnIa9TafX+3ibhcZNTbillZoTUk0gOTtmIeQFklta/YpgiN7O6tpp6EMgaoc9FaMHMBD0H0z94rFbl7ALco60V67CeEV/JPpZu4cVtonURLs/R6iXXd+veENbHNHMfumjuxoQVExJ3nJnNvNY6mdX7h1Op2+020BpCQiwk7CKmVi108BjCDfBaVzK31po1TQvft8Nc1ayZekz5EwXjbL8qXSaD4xHEUEQh4WKLwT0Pv12cb77c9LQgPJkfPfmHZvqD+4HHpj/Q8Y+M0YJl+uH70JKdX94AXgUbl3BK1YUZimEYqtVXb17+xb/92X/8v/yn14d3//Wf/urvfvur7dXu9vbp93/363J/+vT2o3pq89QMYgyGBvcLmiVGgxG7kBCJulW1WhtnXdfBzBQao8kdnNCnv3tKFlij1XquvufSZPcLiXE4d6HUjUIo/LAqoqTcpQUUvpQomJNwjXYZzIyJmMEUMnNsZtZ0TXtiAROlMW8SnMiJlQRK7VCOr+7f+uZWNhutXtrMICZq6vf390RUaz0cTq21lFLO/Y5EQli1cRIxa62VMlljVS2lTKV0mdQmXM8lE+1aq0uhBYEH88s7u3ro5dSd/UdKzMxJJCVOxA51
NdfqkSQ7YcHOQY0G+mDCeD3wstBqrTaRYBHoRCzFnMwJHl5HmE2oS3StzOYABLTQ+jOiAk3m7sKZs202ux999uNvv/zdPEOotGa3N6OZEcOsmet2HDdDUhmKmses6Rl31IMz4Dw6RhfohWZaFe6ERdrLmUACphgNdzCnHOP7SInMPCRtCGAydTdMdVpN27pQ/IMZ7vWvb9++/e677/78z/9chFS1zlPO4zzPzKyq48gppbXc21oLGPzZZHd1h+buvk4Zu4ejHYZh++IjSfzwRuzhfmp1bircarOU4UxuKLWxICVhQQRqCLmhEFZeEGIXsYsTnd3YmCl6bsKcc2YBBa5fADC5wXqP1s8yYnAnjpFiYgKB6N3de0np+ZMnT559dPvk6fXTZ5zT4c3kTgERZGZ2WctY1vkPPrSTl1GadThfd/nuvTwQxaDYrnkYc5b9blNKmed5mo5Wm7tL6rPsgGMxjDlnv4gqLu8jEQJp3iNRePd65+13oRBHJCKrsO3ihru9JXLVFX1tDvMARZibO7kQO3ccvN0fD7E3NuPoTHMttbJjU4rU4qW0h4fD/d3h4eH49ddff/HF77/47vv74+l0nM3IkYIFLwZcoe5oZtYQsQWHool3LpwWkU2Psy+CGyyH6IMQgRYazw/W6vKHxYAggidaU+fzcxxASmmz2Wy329XOMBM4sYOaOZA4/fSnP22tvXv3TkRyTswswroUOq3jRRszJ+pFFiLykBFFYNSxXuOS4AWJBS+/fCSgSkRRI5R+B9WdUxpTSilL7LL45PVKzR1MiVmoqxnH2BWCd9fZDAyLMctIML7++uuf/OxPPv3003e/+nUpdRxHn6s7j+M4juNUjmZtqZJoJxJbAoBLg7MGwZf3bi0zXd6US/O1WvgfnLXLZ55DQGAhunR391rVDKo+jmNKTEvtn5nJGdaaaWIxtW+++47+9m8/3Z7+w3/8y8+Gp7Upszqaljrk0YM9wh1OASojIjfiQbgxo4tdrfGAaTTZxNzUoNYxunPhufhUtFlTSPNkDMCKNutT98bM0XiIyoItA3vrul1aG7lgBBCRUopfJISXL4nftNaE0+oWsUSerTXz3iRY7w6RLw1efHBHeh3sfEOxAgLNaaUkEJGg+wrGtzggp4f7f/nNr8jr05vdx8+ut7vrIXGrcznN10RQCgwWAQJiAeAJyU3dev8/cYrvyaKE5NQnZoUoMVISYVG4uzJnjnKzkXubprAt0uNMWkGgpq2nW5fbjzm11ojs++9f/f3f//LJkye/+MUvaq1ETBQFmkcByUVj47zJvZOBPVIcvQyNfuhBLm+6XihvrYNt/Znk7mfKJaIot58nMmTRObs8SmtOGM6IlkyJmedaM6GZNjNoNfVmygybTYLIjQVOpfX8IqfTzc3NkHJTg7bNZmChUqYg4Y8ydPZo25l7V75dl9d78TfVWv3xASciAjVtH6yVX6CyV+cVLzGLEm73vGtJFgvZ+OoX1reKmFbk3D9b745fqNuvMW16/vSpu4Y/iATa1Zh53AxRaTg9HKZpCodVijBkmqZpnkHk2GT0qSgBE7i5Ta3CbOBBODLMiTmqOAvzl0b0kLhLi7CSRitZoR1/GdWa1lzNxXp2ulhIax4RiNbeGF22LAPGQam5eNZuvs/hzR95rDvmh491TjJITsI4qxqz/NHxDJb1hWd2HF5EZtJCj73evDa3/ZP96Ti/evXqT/7s8xl1uNm+vX/1//xf/l8H1Aq1eRoSX90+a8d33759/5Rv07GS5DwmOAFKcArt4l48EKAPobFkV5OUOOc4D6paI+W2JWQnTyGy4PCLas2aeJiZ2znY6pcZC9urIn04B0LELEZGaK0hlO5BFV07gXpZJSg9o4ezVhz7hzJTFiZGBN/jMAySGG1IJMJO2kBN2kO1tJk5D+o2n2ZTEozjZowD2XM5XahrIhCxs0ynurmBtR0fJgC6OhXqxd3lWy1ZvS6+4WIXXR77cIeJhZlp7Vu4p0RJJOecswyS4KqlNqVWChMlQhISkUQJSylutQXrB5mZN/WmWNy2W+t1EwTJYB/7hipI4H0oeZmBiC6+gCyGkagfWwaC9YtqrZlxOBzMjCTfPNnvhnw4HJ/c0rgdbnB9KtZaYxgRYI083gEC0jO7gGCVmz831QlCQTTSrA/jOsHABjeAJXMeQNwCmA1q5szS3ObamgYLNHfaz97R9BUET73QBfTeI8W4fr93qnd3d+5+c3Nzd3dnIjFrFDTN0dRyPwb9TWst5xHAqqm49ihU1ZNHdBIvH4Zhu90KfHdz49om1XY83E1HYDduhkbESUiBaPss+yqFfB5MtVMSBct5MGatsWag1s0siwR9SMiTCTFxbJhgM+0cWMHlRvBlftRCHcbhSuxOc9XNfn99c0PCs7U9cXOUqkS03++b+jzPp3muVWutIEGkCn1culvC1YsvdiAITtTdc86BRVxLvLENJDURGcbx6urKrAWU1LUum3whKiDKiYNVwR1L4UwMZBz2p5M5mTfve898IaF5bLGXjoH1CTpfWCjCAi2HBaquauxqcMliRIAyc4IIWL252Xa/adWcqbmXyR4OpRnlnN7fHb/99rvf/+7LL7/89s3rd/M8Hw7HN2/e2EiqDuKcx8RjJZTS5jJLTkGCESloVMKIqOn54UuOtwa4l0bgA7Pj5w6JrX9e2u1l667dHvLOahvqwQ09jVdipCSbzWa33zBzHDN2SKj4CDnR9dX1ixcvfvfFb9++fcvM45hTT7Ta+olxaoIsYE2WwhfDfGXCwuOoSIQpUasWX5Wi4AE1N19aPUYQYSZKvQRGHKVEDrpeJk6RQZWuXRa0tv1TDE7mQIhNetBvBS9PrfXNmzf394c//dM//cPX39wdDvvrbVOvRaPXLaBiFnxjfiE6soaztpBtrC5yvWuXTmT9pS/FpvUmrkbezCQHNZeRn6EoZnEf1+jw/La8aNW6e0QXka6bNWKuSl4aEjFomsp3L1//t7+anr148dGLj1LetaLMvtvkYRim4xz85AQmOIyNzJ1i5fp39qDddqhZbdWdjR3cnI2EIA30/kGbmnl2zuDMlJqh1la1BsgT6Oo8ocYUO3+NkdbEYN3J65LGla4B91KNwgdrq1GRfryqnSKOz3KRS4aAFatFj/P56HfRUuloTdeEMCqqThrZkZmbqimm2l69evX73//+5bffnaZ71Ho8jK71+ZN9SmPilIfNprWIS6XfRGcQCTUtZEJkLKP0kpC7O3h2SW4soKDOS+TMFPV0F4aQUQixllJ0lS3c7XZEFC1BVZ3nWVvnzI6tF0AMAEmGYcillC+//PKf//nX19e3t7e3K4wwbHvOycxqVfMV3fahLbq0TuuSCj8qiKxnZzVxl0bbHhfZQX7GFdKZFW+pPZmZyYU+04fR6fL7NWsVEbLOTBBfblE/MCGeSiFVIYzbUZhaLa02+ExSr5AcTJSNgqiS4R5qBL3S5g4zwMoCdlivLvakLvp0l/aZwB0v/ljq4/ISLg1mRFOXdp4vRtmXledL01RKXVLH3iS73O3r09bPSp9
++iIO5JgHFjKzRJyztNZ225GIDps8T8f1dM2VHh4eTqcTM6ec0YcPzUrHXDmxuVd1WlQiiCiMNUmMlHk1FRFJIA9UXiCzHWDoo4TkcgU/2HDLVlw3BK8WkxZ6MT4TsC4dyD+G+fzA717a9NWr0cUNji9gi8O5fHmfeIx9v2D7OlUREGgqAKWUMOJirM2zpNPp9PNf/Pw//k//bsb8v/7X/+3d9JCf7Ma8nY7zw3QamDEO94c6q0nxrUvOo3SZQCNv6kreyGHeY8ecx1Gk1ClgHcJAlpSStFbUYvRymb8Hen8BIUidmBufT2k3vpe1jfMadkZmIqGFFcbdzRqpQwSAkFPXDDMBuIsURZsLiLiSkJhJaFg468iRcx7TSAA78iAyknEzprQfrj5OM1OhGZwGToNyTuOQd/lKANSqKR0XTZjuZmixcbTUVIZhiOgniJY0CrHCIjLVMvBAzNxaBGtE58xtrazE2TGz3W43SBrzICJkHvUlM2Oy6Hlu8pgSE0yJU6PiC6GC8RpCwbvCeLcJi8PA4ji74fNzAUmbtWYWzRAjAqIjREQcZDEORvCAQ0DV+ilAdHgRLe6AALT37+5LacMwXF/f3ux3tUzb7Xh9fW3+wAOu97uAaBmMqTcbzKn7tvNpcJzvLYxcOi8+cZC1CsNjEDwZu4yUN1tjmVozV/fuaJvpXFXVHByFFtNHyMAfntxLxxA/77ZX8zx//fXXn3766el0inrpdrsNAXFgQa4CIWaT8xjne0Vvxmf1emEeOrcKeBhSzvnrr7+eDg/l/o5qzUxTU6lz3mynaR4JoSqoaqqdJjTEhS/d1Wq10kpNRimAye7kAXujcwWdiHKwOiHIJyhKyu7EzM1rfAxczE2JzLy6S07NkDbbm2fPr5884zzcH053x2PO42azubu/v7u7f/Pu7eFwaM3ykNZrB6I9Yw1go5h4WuemVvuAC7ZbYcGi9p6ZzazUybwxSET2+y3znoHWqqqKSNMyz7O6HY8nZo45SQDOzhDm6P2B/n98/VmzLMmRJojpYmbuEXGWu+SGTABZBaBQ1Wj0kM2RaU6PjAgfKEKKzF/lEx9ahBQZTld3s5tV3VN7AYWtcs+733u2iPDFTFX5oOYWfk6iKgRIOTeOnwh3MzVdP/3UULVkASozhAA1ckYH0zUBaLlhVRVx4IGK1GQQUQQKht5RW0d9otlyajyWYBAAUXEoMwBDmDK+fnvz+RffPHvxdhzKcZifP3/98tWbw+E4jRkRU4yxfyxhRLK56DCJ6kGUTBGISCtWohL6A4gZmPlkLRd4N5fNfVltwckSfdd4rS/DlYr+vZetlEulv1OThLzp+92233Q9q0fNLQsgiEBEH374QYzx9evX+/1+t6l0LyKyS2mxswpAqCc4tJmpFTAkQMUq2zGxKS7PWFSBCBADsSuQWidsTxFC7TZwUmhEZEZgQrKTASE0BYetQEP4Y/sYwBYW15wCMBDUIUxRFV68ePHTP/7jjz/++Pa3v12yt5pS6vuUUig+GhXVFHmpeDTvojqsK55VWPLRD7TTd7dsvbn+CeSTWqxmttqmE6Hdyxr4DTC0QplankvJwimm4GaXEVWLCJixWc7H4/GbV/Nf/s0vH7/3+I9+/H0Ms2YFojzNkbiWgrypwVWIedbJPAh0Bk8UNTUCdASvEgpGAxYFETzMgJCQEmJCTgBsMpdS1DxuklKKlhkMGZgoCObmbq21ui0hn79pSxTt89Ob2kHElj9qf/hAFWhD+7dmpVX5pcEZ/JWztLi0bbT/+HA3jcBIVVLqzeztu+vPv/r6q6++ur66yWVOTIHDnPX125thOG438eJse3lxTmNZ6IP9YZ1OF7vNmRa/VWn89YgYoqM2BMliCIwZQRiw6xMiCnIGnEopxdSACC4vH7up2u12tzf7Fy9eTNNUisiJ3IWYCIzcM8k5d11XizfFPvvHL/pu+/N/9bNHjx41zsUG8TNTwBPacy3A7Z8tWlt+u5DN+Lnwc4h1JqR740QISFbUVD2ADCFwoHXFbJEEMznh1UUEQBuU16/0eKwC2peSY7MIjAYi6l0fjGLqHWths/GeQ1PJWRQh52yqaHQ3ZOS02/QhWJHiVSsnv2gOcpOWVchzL1r+bv5CVU9zG5tMLZXPVjdaB8lm5vD15jm0Ou1qBWxREafVaLporXBwocZpesbXYVNqYcKpvbhLMaV0d3udMxFaROCuY+YYAoAeMvd9P45jKbMukOtcLOdc53EFEjQDdfIDOk2fBET12V61NUtQTcdpLnNGj7/BPJfzACCHp25IlwxA/2giq3BTXMMrAI2QcBmKYmYG2rjXmln9Z/T1aeG8knJ/HZnZCNeF7PZfF9NWEvStTSktGakFuini2ReC7ubd9dkH548ePQohbM53f/pn/+XtdLN9//xqPExz2W43Sbrp9rqIbfvN3bUIzpusZxARi5tFUSM0Js9WqBmZOr8qpxCX6re4NSVKVIoFLAqliBQsYEamS9HJFzwAqWk5SXUrNz0siJ/0uBTHgAAAgQKwSSEiRk+pGIIxnnIhhECEDGhEBAiskTiEAGqqyowxcoCAIMyUInKkzIiJonXv//DD18dxHmYw6vvUlxQlmlHOAgCtPFi5pGsFgzwILyXbgiVw+Z/nOcvilmGlGG17mlc9P7PMUPHMFennr5RSH9Om65kZRMtSolSZAYAWyJZ5qiIExq3kOU+jy4uq1pHyi6vhPzBzCLa0IwMjRA4ckAFRhcxbuuvUhsDewMPZIHGIxKrZTwEZOD4HCcAMgT0bX+evG3Bg95AuLx598MFHv3v3tnTKHIkhRh7HY9qeXT46nxU5YDSanTYNvPDQJn1jm/DmEuGR4anoEwMSAJKgWSCOoaeIiLGLSphzEc1m6gVeZEIgRXJgtohlKYHjMlkUCBkXJUvUMkSntKKZjTiZ2RdffNHS52bmeKIQgpv5FDs/lcxRlgYGcPC91qNdshYuqorAAGJm81xub/e/+vxzmcZY8vsX508eP7F5PO7309s3KfWFKFhQM0UoKowAALPMzIyBUdFUPerwkqMBoZPU46k4GTh4c2lT3KiiPsxVnWsCEdEWtj0C9HxY7aE0UMACME6571gMvn3+/HKSH/7oEonvjocYI1MQ0WE4vHj+6nAYtB5tpwUystpYrg5mlcrzvMqsE7r5XPVGNl+5yOwKwRtaGGGBUnMIXEqhTccBj8djdRYFmpX1FmcARY/f0DupRBVFcq1Q14nm98rp6wo/IhIGR+FW+RTv7PRdprpilTOwfo4sCZ1hnPu+z0av3lz/9d/8w1/99S+/+fbVOKtpOBxH5rDbXvQ7cyoYMNjf3VDgwDHEYOZjhEGBiqkhAHknGHlLr5m5V40rFElTrQ/st78qPf2igZvu5dUA4tVBAFwmWwLQAr5xR8EFyAi567rNZtN1XQghAJgUt6eqCqrMHEP8wQ9+MAzH6+vrxBR8TJGoiVL03IG6lpM1hA/E5yeufQ63xX6ntVTCzBxr5Uc93FpVoQlFCykABFygP2YCHJYkugf8Jx6Lmp8gYkJmDByI28
RGD/0BEYGYzLrUEdHz589/+sf/4oc//OFnX345jqMohBBihE3Xb7dbEZsnVfFRg+nBCj/4odrKlbv8wM1oN7nevhYNKsiD65dPILD6sK1KCQAmQJVn1UopWUuZVGbq+55S6IiNCxFpkSxilq8P9l//6hex7/o+ffjeOVssRVBKnzpXekC4zKsEgIrsaLdBBqZGBhQCAIuxWhSI2XAqkEWBtwCkEMzIihn6VBn1qZOeO865qCCDEpng5Ad/HdG19VGfX7Ja7RbRNZfGU3rrckeFKDu8s1EhAHA4FSE9qMBV1Qaqe+aZixrRnLQuoqNImpvub6bUl1Kur28/+/zz3/72H1+9eetOxTDnTYpmwkw73mLcjBnL7fGDzYXrSR/eE5bWd2bO85inKecsSwtDjHEXOKVkIGglMZgiysSEXfLGQlTDFFgohL6DkC63n5RSPN1pis+ePfPQyUmMHOVhSsywpFeiJ0kd8HJzc/frX/+667of/ehHT58+3Ww2TNAmZ4bQle90D62FfP3PtR5bW4Smr2Tp6jwd9mV/uaLB7xVmPHh2z9kLKqoKoHV+2CoXg4gtdbIy6Is9wkAUbCGJcJ9wnmfuExFLlv1xJFBGSqkz6w6jIZfY9RSDFgGBQEAhGGT0RIkzriL4rsGKfbRKJtcpprgKvQQMTCMvaZfFeC2iZT7loIFomldzknNPzi9FSFthTRsULoTYFI6tAp95ntc72IQ8jMUAGAhU6+RLIxaRi0eXWkTK7LhlBFUtIgJoqSPAMA1lmjKYEJqo5JynaXCkIgColhACEZhJDF2NlYqZGaEFw2HKMTGqs0WLj8NRRz9pG1pRn0RqT6rXsqhh6FUd+larBmbLOJjvCCXictlJvL6rcwFWNri91pF9VQ0LTqmtY11TrLTy5L31RM01CSG4BLsi8wQGEYWQRNWKlTn/42efvR5e/M1nf/Xkk0dXwy2kQMjHaWIM5++9dyzx1dcvvzefFYCzac65gAihBQaU0uo9iBhCEICsKmUmFjRD82gYFiwCew7Slv5xIDMkxUpr2zRyQAIkWckNuIb1HTUDU0BDAu+ORTNCMMBlfkulWo3oHDbGPkJ9Af9F59M3ISJUZOYU6wCxyJwiE8AmdSFKjGQkGIIFyCibp2cXF50ex+F2KGM2C2CkBQ63twAwz/M4jmaWUu8WJYRgQMM0znkmIlIR06LCSKo1AvG6BM8MTGamrbdnKduZmdMoA4DzQ3KFg0ZeWEaY2RagGgBIUTAs6PNbndLFENRWScpSiuSiUU0D3i9w4RIYMBIjAHOIFGlB9lfZYwAKSEYckMwMRcgUDVgBEcgADMjbFrRWxBAroI4QwSDPM0FtHN3tzsdxvjyHTb87Ho9Pnjx59vzl8Xic57FAjEzjcUYmMPAYxpzpHdGMyPlrEMGRaVWNAQZCJkVQAy1ZvUuFQgidqhSDUkoutT27mM0izMTM3qpogIKmcHKF1wdwrenW7wPAMAwppVev37qRMzPkmPOcUmIkAxERjgEA5jyllFQMuW0BNg5dRPXENHFUhZzz9fXtb37zm9dv3nYpnqfAm83u8SXm7bXIdBwi06xSig9eATFANCJcpsUtgAUmwhAAmGPVM7XGUhv6kQEMBY0MmJkdcGrihTsz87aT08BH8sowATIiI6BPUihgMfWvr67/85//Nwz9/+3//r/8/F//a0MyUTEtRYdxfvfuehxH8Gl+y2xqRXM0+aIMPWjx/atL7dbdFryoh9a+HVM++gVdjABa1MS0lDIzdiGaWYwxdR14n17fHw+jN3iUPFoVcZ+OLeCtfeitqiQ+PEWdfsN91dM5bf/1qAPRTQOqIQGa+vwGVPW2DKUAqKbgnapFhcyAKOwuHu/vjp9/+eXf/t0v//Zvf/vsxVvA1G8up0mASREPWUopqoUZEYBTj4himGeVarfIUIgCAvo4IEQTU6cHIAi4IjFfC/MDO/XAZj245p/5b9umBZXhHgma1GJv3/d9nyKxSeGQlMjACNQZ5FNKm65/8uTJN19+td/fnp2dIRTP6y0ztxQAeJWFMbMQiQR1mXPYKgZ1SRYmG0QkCkR18LihwTIgql5vBcAR5u7lSAiBOCGiohFam4FWtztEVAcVmGM9mJkYGumf6z0jJKQQQkp9znl/d3z27Nn3P/nBxcXF67dXIXYq6t+16fppyjkPJtoi6gf70vyt9TbZKk/fNgtP0cXa04Wm6uU0juLeqz6hAjEhUFtsAAJFvYfrtmIlRlNVdoJFJPPhEWpDgSGPf/er311env+bf/0n3//wcYrGIaEUUzUU508kA1WsToU5aR3SYnEIUYEQAlAqGsYCQ4FJrCgCBqIghrmI2QRENRpEcUydt+2BgRYoORec3Ph2Xdd8j7akLgMtNmhuSaulu3hjTectOD2rjrW7WO0TFlVw8tl8+vn6sPg7bYub29NqLMwRgHw8QAih5Pnly5eff/7lV19/e3t7619RSgHAYlBy6bru8tF773/wdB6H/f72xkhEyGALfMYdh/pdh7tDmad5nr2jgRE3G9rF/hwDU+IACCWRgQCR9JEigKgCwSZSpBSxzxiB+3l3RkTDcRrHEQBENITAFN2miEgpKkv4gcgp+RgDbNnSd++u/+Effn04DB9//PGHH76/3W77vq9ruwTJquoBUBNUESG611i0lvzvFs2awNuqERHAe1XvhYiL3si+yx7utoAwcrVG7UyJGSxb73vRnKhSipEkY0RTVfcSPRs7z3MgNpCSC6ohWCQgNrAgUtRyiDMiRgxAonXk8un2zMzZGdaPU3+FsPhpC6aAEBXZzHu+1iqlaYOF2QgfGAVbajDLm3UZV72dtF7kdj+ymoRhdmKCbevm/w2v3l05txi6u65Q5llzjiHMw1Elb/q02+0CRRExKGUYiAikzNMwDgNWqi4b8jgMuRToOvAZd13XBaYQCAMzUVGyImYGjEicRYMRhRjSBqiAkRmL5pAiCoUQHJJmZmogasxsIIBs6H1TWsRyKcQJACrwi8xZod1lATVdgU8qn8/Kqj4wsb/3td6JKrVMhCc0SP0tVgK4GCunZd/3ALCeFwn3oYbLfeLTp++/unn+8uX88U8/evbqxYcff++Ix+2ji/10FNXNbhe1m27LUbNs4zCAgc1m2RRVO4JABEhFxOevUYzEAQHKOJdcTAVWDdaIKObmFcHjbkRgWhARgYhLKbNkrfz7pKpkEImXyewPFMEM6qwfwZaTXI+iiKqhGlMFC5uPBFhyRd5RTQBmlJgExElrPfhhJmYkob7vu1SYy4yFOBjCIU8aYPfoAnI323Q43mHWRD2F2MdeljEbIgYwu9I5jkOrEPpDDcNxmkYTQ0Sgk7aapSBijNFBCaW1OXlJnYOtmnGbskspnY5frchKO4euvAgwEBABE43HI3logGqGnqm61xnsZE6LHCLWdXP0oEerImXTn6vO7KkScDSplDmbeX1XUI3BgMBp98rCWdy20EwYIYQYkVX1eDzGGM/Pz3/4w08fP3509fb5T//4XxyH6bMvv5Jcuk1/eb67O8zg8wPNDAgNj
AjsxIYCAIBa9SMYAhihIqihaAZRBQNGJZzF2yZ9PUtIkQhmNQiROCp5HhEQgYhjPE3IXY6Pr4Q3l5sZIKITujqtdaK02WyOx+OXX33z8ccfq6azs7NJCmICgHnK86wxsoJJWcwkeBB3ckfsNMeyiv005devX3/22ReaYur6kEJRyQa7bX/53ntQZJ5nySWrBgQOwTSLnZwbMXDPL3KIMVrqrSzUarRos8Vl8bbeFJiIYgygBQFDYFeNBmROkVFLhazg7VWRiMUIFEDJWDH2geM068vn3/zNL3959ujpXHIX2LKnqHQ/HPN8gtksxxwVjc07OlalvzY+B9GtgNXmWxYxRG1FDz8UcylBoX4C4TRk7RSkAPQ55yJydn5OzKnbzHMuRaYsy6grQzIVQarMFl75NFAv43oFuu7NIoEOr1MTlVZPQCmWUo80hpD4PpmEx4KO6kQzFUMIzPjy7f4ff/f5X/zl3/z6t1/e7SeK2yJ0dXsgTILEzAIqpBQI0MZpImIENAMFNQwOR3X0hSfjkAhQQWp+Oiw0Y2bmWgtPiNyHsQesePMeGPL1ZWsVbUuBrspyy6DnYqZIwIydtzejlSKAAR3YRR7qY9fF7XYbQnj9+vUwDH3fz9PBQMjhSIDO/OC0nvUOUYlCu6NFksUMHYdWjam6a5LdmamzRKhWFR0v4fo2hMCM3p/MAVNKZgoOBDLvcK9UK5FYGzCtFR+c3aYt2mpNYozjPAPAy5evfvon/+LR5ZM3765T6vfDrVT2I4ocCBDUaEkEn7T0d7appY/XL1vFeLjqiHvgHyMuhF+rssmyegqry/zbVGpX80Lrhug93QDDPHktMRBjMLFqqu6G6fHFxevXd3/253+1TfGs/3m47IJpAHDmIyLPu/u0vVp2ptqRW916AhxnAyalkA3HgsOMs6IhiQkzIWJphQ5Uw4JoBsWgMAHFGBAmEdEiUHGwjV7hvvSuH9n8XPt5aUkof78sPPteIbQlmet0Weszggtlt5mVYiehBUBE5xtr7zS11kBDXMkXKz7o5Yu3L168ePbs2fX1NQB2XefpQkRj4WnOpdy8ub49f/QodtuLEL98/nqeZ1Xt+77rOgAt09zyaOApETMi2gmcQfjgDEqCAIEZAwlhZJY+YqQiYEYMKZaQJOOU8zxlTbrd9iGIiIzj5KW/BRznd25zziIncfWkDzOLKCKJyMuXL6+vr9+9e3d9/f2PPvroww8/9A7weZ43Z5vF87lHpCcizPpd4W8VEVi50HqfRqg5S7C45w/8csQlgFHTXGTOMmdx7IlEKVb4hC4uWQFq66NzT9pCIV5K2ZxHt8IKmmdxjqIFLmTTlKWUxAGQxpynUjYBS4FccoxzioE3FDECmGpmPAknETkjQ3P4XUqBTgy3p2hwMZcIpEXuP+m9ovRaCfifi95rL2zL6O1RzHzqkEJExJxrr7jex6zW/r5FFZ/ef/HmXYyxCxHR+q67PDuTMr958yowas5g8vTR4xA77BMQ9buddwZHTrvNUwCY59nneneRu8g5Zyffe/v25nY4nu82KW299GJmznnbhYgcKUTiaKC5aM5GiKoyTkUAQ0gOmzaTaZoRzRkgNpudiOQsMXSqOsps5tVahKUxIzBnMxFtdRVc3P0qfwv+fK1z2x7o0iAOLV8L94vXvI7OaygIAEzkKRZceKsauQIv1L0tY+Eawb+CQzjcHs6252PeH/b7tI1X+2t+EscycgzMAbMVzRQQdz3OudxRCfz2cLi83KU+EtGu727HkQBDCKhQzEwUXWNmVS0EFZegCKbkUWvJ6rwtRMoKDBiQJJoUc1xhjSEJzRgguaeKiGpWnBlhIdV1XTnPI/hkHg66DEIJTERkanOeluqxqWoK5JNJJReOfLbZlFJiqC4tkMbEXYjMRJwCAoHN0xB3qZi9fPlyBEyXZzc4Q6c/+tkP/urtX0tJSHC4G0IPMXaPHvXb7Zkz3QMABSZvB0f0kI+IKDAg8gI1rEX8RR+VpdJLp8HxoKpGy4PjCQDsWb1R1ERTSng/SxRC6LuOCMkpxaRIKSEERpjHCdG22y0zmllKabfb3dzempljIGtMagawpaUJntCymORMgMyYLE5xosEkz8rMHPvEOUPHpEXRLLInjYTJu04NzMAW34UQVcs0YcCY6Or6HapcXFz8wR/8wXA4OK35D37wA+AQ+v6bZ68O+7s+YVYroCBgUJBDpKAIIuKFQzdqgMIcA1EEujg7z6UgWc55KhOQKYKojsX1OAGDCQxTNhAiQqKx1P4lQy0i7r1JPjHurA9mC87XOhoRQWEYphDIzL74/Kt/+fN/EWM8HiEyE6XdbjcO9RnFNIQ4DnsXVC+lNz9js9n6Lg/jfH5+fnV184u//2UMAbtUSoEuQggUIgQueIxdTCGO4xgBtczFNBCXedLAbCilDgJyYfNwFwN3ISLiPM8mJ79HFYpKF1MtuYtyYFrmFxs4ntZjbkRACV0pRQtyJINQAIphAXz8/kcffvLJN89fXe8PFOI4jq/fvkmptxSD2G57/vLVu9ub/X5/SH03l6LqozJcwouokhEHIGq37WDXddhc5xPiwmetqhRCQ+8gIpJF50ZA2O/3McYyHGXOAHB9fR1CAOC+7z/55JP3PpD9fn9zu7+7O0zT2HU9euNALkSYQkBAEQmEAJpSD2Vwwti1+0hEiKCqUpSZt9utx42+lYlDjB0IAJBIcX7sru+1JIQQiW9u3/2//v2f//pXv3327KVZKsrTXNTYiIsJAIhlMEWSYgqgmCwPwhQNQcRUhSwwE4XAbtHJSim5TCKCRCFF0hoKrs1QWYjUcUmu2ao3ci3na+/hnwhLWsSirXPbLyilIFgXY9/3MUZEQDRCQ8Tbu33fd0QwSz4Y/B/+1f9RJF/fvAshgGYmBHHE+D3fvR46MlACrU/BC0UzVTgxuvlDREMVKWC0cNu2e6ZmWQzB3TU0H0YfYgwAZiAElXEI65AnVi15mgGA2AnMEBUVFQGYWQXQLMZo3lgqGZHB8Ozs/Ouvv45dd3V19S9/9rOvvn02TZMViTGKWInx7Gw7juM4joFTLqW5j7YQ161/hnu4r8qf0fyN5raKnGA3zQCZWYidZw+dn0xXM8SsNjKL3UtSZ0REPmlCLwfNeRyLEFEK0SwmjhSDmfWb82Ge99OQ5+lv/v53F9vtn/z4B+8/2hQopnq22R6nkZA3u7N8HMkgECtlMwNDAZWiAqiGGPusPGY9FhoLFgjGbEAu9gCgUEt1hkW1MJlBBjBEAhJADoGYu0nyMAwu/81RdkfC/YrWYbs+IMuO1wyUB3K4ZFLyXNoHVjS+Hw2pmHaPomOMFfW28O01YV6/iMg/IefcYF8xxmmaXr58+Ytf/MPLly+vrq4AGRHHcTRDbxEysxg7M3n9+m2M8enTxymls4snX3755X6/9wwyAATmUmbndKxRBAITD7Pl2+HNPLxPvN1uExvp1EXrCAPOViyFYETFLBBtt30+ymE8XL15s91uHz9+nHO5vr4mIqCw3W6nKbch3tsldBnHcbs59yExXqddFk1E
hq+++ur6+vrNmzd3d3c/+MEPHj165PhYV55dt/HC2jiO7USs9dham/mGtiJkUxcN9+vy7/UrX2T/FlUNgSpTaC7jNM3zLJqdWbuUImC0+N7tcCFiSqmdFD9HDoUIgVVhnov63aqB2XA4MrOzChcfTVEbaMWKp1v1MMybPmz7LcWIJpInUHHq4lNOEQCW9gpZMQOrQt/3VttD1+CCE6zggcJvPmd7qHVs4uvWllpVu65b6kwPI0mPjdc5x/YVuAKnAAAzB8B4HPJeB0a6uMR+I6XoMBbTAiocMBsIhmwkcx7zcNalhrsFwq6LzLvNpnv06FFKyQPuEMJ2uz0ej5vNRj1BpOojnkRtmovhEBJnMSLk2HnSX0QMOeeiJl4bzEWLCjMbUkiROKIj44jmOQNRiJ2TZLRjrGKBAizgt2ogxU6IyvsJpwev9VL6Cvr4hLp/C//ESaZ9ABxzy+n6jaxNgl/sB+a7hqHWT0QAbH+zv3h8aYnnkjGRqlgWnQDECBi2FGk7XA2Hcd/tnoxasvFZSOM4M6ecJ2YAJlDzagsTdJGzcoOTMSCST5qAwG4xgSAUdCiIkkKx3LRt1aGAIYR1ZjouiUNFWPWMLV+jYmL+CT7VV1BxwcoHOhG6EpgRMCCgMoFH7kQYEAMzM4XADBHARIt325RSjof5qkz95c4Y+vNus0mf/vSTb//2xd14e7l5TzVP0+QnZ7fb9X0/z/NcasHTzMACrGRAEXWZDOZHgpipoj6cqfyehLRzWKml1pAhqp4xITSyZ6lUTJm8fxq9DaNIzkKkFbRrZmQLAmGZ5NaFFF0nOsNvSSlGrmhDAyIK5NztyggxEIrPORQAEJQAyibOrdpSRCGEE7ywQmEV0bpNsjJfnO20yDbx2dnZF198cfX27ZyPv/rVr/aHQZEuuv6Tjz8e5vzFl9/kSULAGIMiqiGQkaGzhrAn+QmRAjNHThEJicxw9kYcBFEwp0dlUkc+gG8/IgQkp60H79azFcjeOzx1NRySvtMwvT6h0zCklBD5eDy+u77a7/ePHz+OMZZ5csPjEBQ3xiEEHzTUEpa17GZ2PB7NkNmKmCrc3d1VK2CIiJvN5vz8vNts+o4dESR1soGJI/CQISgEslkcdSla9xGW3iAiqj20odI/ISKGuDQ/1jhw4cdiq2QlpgY1l2E4ZmDuOEYMCTklDpEihC5tz774+tnf//I3qd88vXz0+Ml7OWeO3TjOjx5t3169+/rrr8dx6rcbIirLNCMoXmcFM2Ux1aWEUXmJTgpzHZC3dSEiMVBAWDCtKjBbCapEdWIBQCimDM3BFdeWbkSQQt/3pej19bWqqAr4ZJVWmSTVAv5/W6hZ9cTSqd70jShFZJom1QRLuGrm5pkQoYiFkCBGNc4F+m7z9t31n/7pf/nbv/38xavXw1BiSgZYAAwJzEqFoCiSmBQEBe8XxF7BHJIKQEAOkUePbRwoDkZE5mTx4AbiPtFFE2bzvpka/C+Qz/sVpHva7P7cLau9KABAnrqsH26gWlbBuSF4WwFO08TsHm3p+k4Vu6579OTy5etXwzBUsvUYuy50XUzh5IK5dy6avfDjABlVdT+JDJgocCimMQZHX+OSLcVlnq3eg6Kp+WweQNBaCCfyoSPKTo2xDDJspz4w44KnqE+3zMJxeWFEIII6d5k2m40ibLfbt2+uvvjiq/c//OBid/bNsxcxJnOouCkhRg6B0LxZ//cVZpsVWJ66/nbtt7V0/ml/VwF8PTurDV1fqY7gxOpgNHySp2DAtM6DbdlqYGADgyxFVSVIH1NKqRARdEplGPM/fvZ1x4QGm5/95NGjnebjfhxNJfSpNLZ6cLCp+Bw2ZFZjUQRIBbkgF4NZcFQVIEAyyI4sdBMJ5kAdyZpFsiqgERgbGjGpOtlQBU/JqvW3rOZ0NwHzF69eOWcXea9x1YOmsF7b1Qad1hNXdUJcJVPad60vEJEQks+v8nNdSn758vVvf/vbV69e7fd7KcYREDFx8hClWRA12B/Hl6/fHsc5RNr0lzKLFfMnd4FGCyYkAibGDF2IbIiKMssBla6PyPT0fLtNQa0WEmOMRCjgU6M1q8y5lKKH+YCIAPj27VtVPT+7NELm2HWbyg2j6OOPmRmRzs/P/RmH8eCRudtTD6hubm6GYbi5ubm6uvr000/ff//9fucVn+SwJvt9A7GaFgI49Za3NXdR58XLathIWtqsmurzrahxjjRMqZGiWe2LwNX8vbU9qgwOSw2NGlxF0dAAQMW71U5TGVQV60HjhZJAhpwDIQBNs45THucSyUAzY0VrAgACLFQplH006wnadZIogXWr+0Oo3QOJXeuNtqqqSljRajV/AbXAxctoxybJ/gMRr+3C+otO7stK/kPqNkUkz5pN+Dh2XYfqJQlvN0ABKADBcCwiIjhPOWcnhWPmEJxk7JSaHecZiPrt1pCLAgbyc0YBEqe692zEXTFFBUM2gpy1GBmFohkZzR8DyZCRI8cOEZfZW96aIuNc1EAd8le1ZK3MrKWzvvSkQdeb0fTyA+MKrWYIoqbF1Ns3HSzq6SUDcOITbyGrGVxCAfM/MSfVQDBTJlQ1ARPwgVh1HjEwUSCxzIA3727Pvn/ebdNgM4KZFFCLiBQACbrU9492+dl8dXfYzOn8GLZMFylNx2mbWIoyBgzknD2ubSKTEi6yjs7NQOZrhu6mESObIaMiKxlNSozE3M4qEUVmH6VlZrgAycy7+6We87bmZuZmu8m3H10/6NUDMAAVRIzEHJAAgZGJiSqxeIjeKk+BO8IZDYghceACwzDeHsen55faDdfz1d27d5/84feGl+Pxm3w73GyZGjzGXXwKHEqtvRyGIU+TSwgFRsTAtW9BnF1apIFaFWGZC71O6jQg1En7+Ouk6az2RptZUdFsC2TU2GcuqyBo9E0gygveXcHGcczzLGIhuOKq0DJfWo+4IrNxJlAzy+MAAAjKBKqCRmTGhh2RJSbskWs2CACIY8sPea2bCBIHDihlIpUffvyRSBbVy/Ndnqbz8/PjgIfDIGJpEw+HA1D4wcefGNDNfh+6nmOnhsVU1EexAxGY2WxSFJztlhAZWMb5OI3mbrRZtuzcJ42FyFM2S14NKDTkQ2WctmXawD+jRtuxhcUtazbGDO/u7r7+6tu+7y8vz6VoShRj5JCIBiJCjJ4m1Irx8JFQdXOHYWCOCPWurq9usCLyIRBfXlw8ffp0d77rQ2WyJcUgoEUMgVTMdJ6pFOuBiAIgSimgpYV5uHJTyIFeiOCJVcIFfQBeYIcFmOBwyaIg3viIuB9ls9vs0jmlJIACLIgq+OVnX3759bev37z9gz/40e7s4vz8PGfpVedc9vv91199883X347j2PXbIk5ZFqoitBqGOA+KA6qr2V6UGAIWmYmIVg6B/7NtIhDV4ySQ0YKJqKFo7f8nJLViollCCAoUQug2feq7czk3s82mOxwOd3d3niGeRfuYui5Kmd0ThwWM9CBCaE6Cs8iqkkEWyadiixtghRi6SRSAjbtvnr/7i7/4i//tP/zZiyNOU6YQ1GguKgYK2QF
TAAqgIAZYwMoydGELgGKV371VkBRMpSxJXCAMnv2wZcWav/tAjG1ppqpiv1jx9ZXwndfpFIDVpINb/bou3qTtuScm8BYsRDLIZgB9irP4bEx98uTR5eXlb371W8e0B8IAFAMwOitvPMU5qIgYiC1W92t52wAb+1/1XTzyX0pqRRQdQbpeNGtlQ1RECoiBuHJWg5mhiWoRXeYWgIGzgDJAIPIeQ7d/IZI3NiASYTAkFVBV6vqcc+o2N7evX7x48Ud//NMf/vCHL1+/5RjVQTc4A1jqgue7m3PxQPk0R+LBm7Lq1Wn2sWWr17rLf6v/BOOdKvCS/GqeHJxSMCdl2O6t7ouox4SIaIQcAiHG0GnGl29uyvy7lNJ22//LP/nDvuvm44wEvRfWiBmQAHwJsgphCqEDxaw4z1owZqWsWAyKgAIimZqTeJNHqur63aSUYqCgAj6GFZ0qlkgr+FNE3IdcxEPXT9HWzaWIVkgQWyYGLQulCxy0Mso0ZBDpElqv3L/mNFbWq+Xbm0FX1TVvDRHN8/zy5cvf/va3v/3tb3XKIoJMRBxCoBiYGdjrjQoAgIZoOcvhcIgx4sg0a2eM2RAhRgyKpBjFd1xi4hQ5cjRVEdmDTOVQAGOMKfbMqiZGCFgMsCgWhUHKXsaru+ndfriZKYSYc7m5uUHg3W5TTBH5/Px8oT5hBZvnzBxS6pzLEAAOx8jMwzC4xHoZ30zneR6G4e7u7u7u7uOPP/7w4/ffe++93W4HUIkemzw3n6dJLyKG0Ma0gJmUIiGEOpg5eC9fUa26FPEUnDdXQVWt1EY7JAvEhazVdfE+LTOvWGRlGabte9qAe6SEiDmXPM81MAYwn3LnhWKjSlZnNGWjFAx0LnmYcs65JAqe6wGjkyyBISOiqdCCdLBTioGzPoSGmtUWq+/qEF3BO/8p9d5SaQ/ebziL5v+s8SarwhV+VwWpaiCClBIubU6u4re73TyNWkzMprnM88zMTq8wzs6/IWYmxoYWAiPglCXbMBzHeZ43xVJKgCQiZfZZzwAQDAyA/TZuD8ecJ5FsZqXoNGUVZOYpz5vNxhtsDofjPE8pJQPabDbiXVUUcsnHYRrGmRcHd32wAQCtBh4PFPRpJ74Tnbc1av/0v3JyBV+4lsYDAK4cTdz4YV1AK///ijfc/9bvrVWxm9ArGAOaGBDeXt2+N7zfPd5MIPM8QbFouEkdGBUBi0h9SB+c2+Hty8NVT/q464bcRUAxZtpgdfGLKxvPPzPUMRhWt0HUPXcgBe9vaTzCXgwkMwNRNSXTUuvg6K19AKDLoHl0jtCyuM2LmlYBRR8iVemGqSoEZHDGCzNTM47ElCAQEwFhjQZ9YnVkCoGYCZdxPRGZkSxrOdp0LOfnj/7t//X/8mp4/ld/8WcwF+ph0iHQpuI6AgOAw0hil/q+V1UKAZlDCHPJJ11Gplbnc/mOiMgsJcbYhpg9sM1VTu7T1aqqgDhkggzyPHuDeEuUgBqgMSATMFiMUXwCoNlcMhEl7H1+Y1OyLnLeat91XeTgJAcXFxelT33f96nruo1HknmXS5Y66h2AY5pyNkOOgZmLmtXp54QVB6hFs5kFIg4YCLuAj87PxuPdtC9dDNN47FNXcqXVGed8c3crhmcXlzHQ+0+eYIxAPKtkqXQvRUVVpagoFBXRLMXADBUj0DxNpkrMwJbF1WGdTGxmdSyDgQ9WZGZAXGip62w0qxUk81zKUgZRW5gYEBFOtSY1s+126/RCXdeJ5G+efdv18ac//amfU/IC1JxzmdzfTSl54lDEAISgzYuHELwMYqWUm5sbAGCOSByIdv3m4uy862JiKqUrYlMpgoSJmZjAUGcZx6IazQLUfIoaukSaFHdzkJnAEM3qVAILlhC9ARcWkQPzJlJEQSwCPvETmIHpbPeEiAaD4Wa4PQ53x+E4TEOWl6/eKMKTx++9/8FHPhSTiMZxZOAvv/jqiy++mKZMFKZpUgQ/L95q3Er7ZqJooFDHNMBajbl2OVVIoDlV3JlqHQnkuVTQZZsEAIqoFDWuM6+goAqITWbmousRwAfbD8bDeHZ2djwex2GYpqlebwrA6DN+Ft1ORIhGhE5ktRx2ai2gqpVtXKt2wpQ6Drw/HkOMh9H+43/+3//jf/yPt7ejhvMQGQPnYlOeFYiZxYQMAL2tS6F2v3h/Wv2OlgUHALXiMOB2qBHRc40Op1z5QCer9F3vChF1Ueb/1MW4epkPXF3a59cvMyPiGGNkt18ChgjQd3HKMwD0KZVpVtRPP/0UAN6+fe1D55l93CwSoOQCjmRGNVNQQwPvVsg5g6O5kDDUIylL5gtrpOSph9NpBQBAICbCCppycj1UIDRiJgb0PisQAGx9rUTES4s1MTEBMwckdHghceSgjNGIiIECABUxERm1vvq+3+/381Q++uijzWYz5cJIXqkuRZ0q3NX7OuPc1M7ac2j/XLtc6y1ul7XNXVSZYQrr65dryaxCoZY/IbdalWXXbF2xB/D18SwSlVJUdBiGaZq2psEHv/YblHgc5XefPyOC2IU/+aMfUr9FmYspqHZdlFxCQlIARkA2YqUwKx1z3g+TBTYKBbgNRvTvVTUErxoFRDYrKqjFfDfMAEFdu+NSF/KjUbyTYqEpXq9tez3ofSIiopbyWyohjOsFb2srWhNGLXX7YGts6a3C1fgEAPI6ld/PPM8vX7z61T/8+osvvhyHqYsxxdqW1qXNZrNxez2O45zHUoqRD1ap0buO04ZDhxWhGpG96JEo+AVd6vrU1wIDyoxlFsFj2e5z33chdkQEMIvNZpZNR8GD6M1Urvbz9d00aDgej0TktP9d1wXAzWbT99sa6iDOpajWEuj5+XmIZGbV74pxmqZSyjiOALbIf7m9vf3qq6/2+/313bu7u7vvfe97Z2cXbc3XYfP6CBCRVz/8/bKacd0C/pV+Xu0X1iKvHw31HCUIVV5Er6ggIemCTdD7iIP1GWw3Q0R5FmAFgDxPZS6IRsQBSUQMRMCLisGIVCCr5FljhKKoUqbJSqmPqaII6lPCEOtwMENqBqgdXkQEJpMC9/M1UGFR6t5KO78GYAatdn3SK4hYQ+b6OADgBq5l/NcawF8uz3J/1GdLSOmqda4GkEjGBJRCikxoIYQucNj0KQYts0iOMS5gCzYik0iRIydwZrxQI87H753lWYDHTtUjTBIxM1EgYjOUgqV4Gd0Q7eb2aprG4iT+AtNUTDGEJCAUUkiganPRMWdF5JyB2VtjmaEUmUWLASEFHz5xf+/r463CvbY9631q6nWtvr+jixtBkBPA1/V1Z8V3pUg+yYHe0/jtW1r7xwNTMZUZgEUKoB7vjnfv9vG9TeBYNKNSQGAFsVJMsxU0e/Lx48t8eP2bb67Hw7HMxzm/tz1Dkb6PSiamhBAYkRUNQDQQARQAE1hS0aoizjGK6iMBrVYCADREcnwhoBJDNARQESeYQapYPtVayTlN1yUiJxFxh2/Jw9UVaFABp+kEADIghsiBGQOzmXo06CifhkPIIkDCiIykxY774bgf51GuXt98/PH342xffXN5eL
l//+Mnd9/ONmgepE1DdXFnVQDIUgcNb892ru9c8alUHbTYEkLEdL8N/Z4VvweOOJXdx3EsSHmavUJoqh4QqhmS1nFnC+smkutBJQRDhKLWdf7cl5eX8zznUvn9mNkj8EAcAjNB3/eRn6iWFGJMzMDMbIiaooi41+DkDDF4nyBlsVLmXHJ2spEY4yZt+hTjtuviZtOlLuRxfHx+RiiWtzEwqQyH/TiO212PiMdxYOanT9/fj4MCpq4riFlszPM8TUVMwEShlJKziOmsmkXEwBQBgI2EsFaYXSiYgJCYXf1VkfDlWFizqn1A9xtrpXvxPR76yt/1tGgBzZdSAKnrAJGHYXjz+t3jx68fPbpwprIQQkgxl6mp1JYwNjNU56mrWs5TZuMwTdNUisbIpEaqWsSKmLAwCqCAjfM0lDlwZAc4GnM3gxY93GVUAPCBQvV5OaYUwBpC0BTVId2lFHYH6r6NETNAMjDBYIgUOwzRAEbh/d3h3fXt7f5wHMbjOE+5zKUA8Wazefz0PQBKqUfELobD4TBP8vnnX757dx26lGcB0BCCiBWpw+jIQd0AHoSfykEATmmoKqoaQvASe/utL2PaJDHFAnUSHSojIbKYOLWxmSoYKqqYqjJwMbU6o1WYGYjAOUh3/cXlmYjc3ezfvn17e3ub5xwYAY2JmcxNJCKGEEYrRGzCIpXG0MxViiHYMivVT70CcUq9onLsbw/zX/7Fb/9///UvX76+3W63RUEBsGgpxfNEyACqVvE/hoZmjE2LYotIkQMSnfo9bNVXtrbH61ChKZe1PK9/tVSEfo85eyD/65/xfmkdl86ovotd1wUPCgkQMXKc57GUcr7dXV3dXlxc/OhHP7q+vrq5uVFVVDMCYgqIMQYE5YDOBmze2aNtrCKYGRkInUZliEjoUgsIWzHHXU9HhlcBQ1UrohKoA1DHM5DDDZGRHNpCZkbRgz43vhgIY4xdjIieLFAnpEHk4IymHIiCIUcFEZmAN30ye0MUbm7uvv7660dPHvd9fxxvjCwSp5RklsTcRZ4I5rVXsdqa7+6F/9e7LVrJYnV27kWSS1iqsFzTLMsDYYAKTrN2ypqePJkl1Lbg7gUKqJSSZ7G9dGljwVLqu3RGmt/ejr/4zRfdJl0+Of/+x09QcJ6HiBSIxzx0Gs0qFUIxnMUOU7kd8n4yUgsRlQgIjRTURKVo9jFvBJECmoAJmVCN0oBUnXj8RH7rIUer2Ptz8Wo+RBP79WG5vybgbXv+CcbQCrMtxjAzNWmVJW8LhBVYFxbo4LpAbWY+vdD37vb29ttvnn322WfeB+gehf83xf7s7Ozs7MwJL0Tz3d3d4bBX9ZjFQqBARAyp61R1wVIiIhBT10emmFLyD2zHPEMHJhOUt4e566ZNf75JURSIopR5FpkUJw3ZgiXiTeqFj8djzrlk3W63m81OwC4uLnIWwlAd45wRsQ6b6WOLwD0fejgcxnEgupymad1geTwezeTueHNzc3N3d/fxx9+/uLjw/LXjw1v1ab1ZrR2jFXVbWLJ2tk9aEcRVa1N35rkOdCwGMjOICtTIiJhNF852VRXxMDKEUKF4sMwhWPwH56XLs+ScG7KvqWJVrWzfqpolF81zIRWyMoUyTVPOxrFqAUQU5/Jz1A6eOlnap6nTvN93Jm0h6l/L8/oP1/FCEwZfirZc7fgDnAT+gUppx0T13r21i9sR868IH773dBgGXwUGjDEi2DgaaIEUJINI3t/dHfb7aRpVFUpe7jIgjlYf21TVia1ijIShlJIdeWrCFAGo5NoBzOxVzeJxFHMABo6sCgY1z+ergchMEYxU4Ob6zmGWTmhesrgvpYpqAGCKNTl70hcrSuq2E+vYfa3KHxjgky7WCpJ2rILbNofh+c+yvJA5LAMo27c0mX5gOfDEFgDQcBEK12+vtsfLtOm2cYMcQskoxaQoqSArgu0uzj96/O75s3HKt8PxEXZPNucmGmM086FBBuCYzCKlWFxwaGa6THB27LUpBCABrJlZAzBg9lFyTg+NALW/08nxDAxQkYzUFtiad6I5jYisn5Sosg7Qgg63iskERETyRfDYD80IyYi4krWQMRqCUmAijhwZVIvOhzwfchF7+fLNX//1X//jm1+8vn52xv2nP/n+68/2V18OZxC9cTbEGEPnEeswDLBkHJ14tu/7tOkRcTjODoGe51lEvLZMJ0rr++HHwo7gR9ruN+0ooDjeFYkXeImKgBGwD84EMCMwYqbIJtlFCdBCCLFLyHS+uShZy2rIocthzhMiImgg5JRAGdDRcbn56GYGZgXMR3tnqVixaZ6Px3EqGRF328uSx+EIRaXIvCBb7Mnjy0++934fw9lmg0JS5pgYNAyzdV0Cm0wxbVInZZicHyhALlNWVSmSxaCo8+8HtYrfAyfRMUJgcdQHOgkhuQstpmxLEr0qzWUx3Q9A9OSZyygaYKB6UfurWi9EqKh+Wv4BADCPk7fvj+NIRAB2c3f72Wef/exnf2JdZyfWny0RdV03HWemyFwW96IOUPVwHakmd/q+B7glIs25gO7vbq6vkun5psTj8TjNhVPHYoCci5gJmxVAIiYKIqZaAICJiimJ1AxfTaa4C2OBkDmYqPK9BgDXG6pqhAAEasgBQ1SkLPb5i5f7/fEwDKpQBIesc1bDsNvszs7PLy4fpa7bbDbzPOdpLnP+7LOvXrx6mXOmkEopIcUQ4+FwwAXvZ2YqzgZORIbOCnwP/oArawrOL7pYZxFRkVrqqY4CA5EtGt49V4Slvkc+m8QdaFMi8Bk/c6bd9nzTdcx8drZFxM1mk8fp9uZtFu8BzqZSCrhz6WfBfHxlzrq0mhsoOjyJaurKBS9nyUWGqfzd3//u3/2///Srb1+fXz69u7srkcwM2MfLe2FGRLM3jaBzMxmCsoslJiOGRkfZrLJ/Oze17zUfANWyNj1rG/QgGFj7AQ8M1gO71uydqi5zpB+++pi6rnOqwxSICLz45i8V8fjwo48+evz48W9+8xu37Llkb+IzE0SjpRkpEFOIDW6Xc+77DtVaMOzI0tZIrytMFKy8f799f4J6gQotycTqI6IxcvPjl/4IRGRgiMjRrwfz0mggjiHmnL2GzC7KgE67nFLquo6ZSynzVJ4/f/7pH/7Bp59+erv/ZSkFzFKgkriUmFIKIWBZmvRWQSAu0NC2Fy0UcSehXdleurDsNEfZX2XZd7V26ldTB1fj0ZvbV99xaUE1AFMARBEppRBBJCYiCAGJtEwYk4iMw2xKMQQTuRvL3//mdx9/8v7Z+c8f7aIYMIFqQTWTLLmoqgFlg8MwXx/yfjDFhGIFFUnFWLViW8TmPAsAE0QwVgXLfj8JwQCXIjkCBLSFwXuN51r/0M5OW/AWHzZhc6ypK/nljIOteBRPaRdVXaG0PO+HCxf/P+UKerTgUdbnn33xi1/84tWrV6rq6doUe6eCOz8/Pz8/3263jCSau6673d3c3t4O0zHn7AlHEQnMlBiNNJz8ihBS2m5dFJvAuBSpMKFRGUcZrkc5H6RLsaeO2XKBSfKoNEEnMUWK297OD
F6+fHnYD4joLClZnXdHqXbKVbHcbDa77bkXBtuShhB8TLKZOVskAJhJSpUpcLwd3V+apvzhhx96P2Hf996o/2AN5T6coW2HLnVaWcZxN3luEUuLXYjIkQBL8/xp+Dsi4lLyaqqmKRZZWGGbMUJExA7NtGiZRYoSoBbJauijdoxET+IhC2n2DBJBSrFpmkohiMSMhGSGwROjhgJohpHajdcHF1WRe6ye/pBm1YVe+8xNbzxYySYSJqdAGpZkOi6lUV+59Tr78jYzRAsnzVrg17FiePns22maAI3AmzkEzXLOu03PzCpZVe9ub6xIJWyMcdmtasJ9cmkIwQlgkgaz2R9EAMpxIJrMWMWIQkoEwGoyTpNqMRPETBRM2Z+NGe72x/1hIF78QtEsVYC0dqwFL4aIOfpxWR1yLNaJhms1evCkiNf6dP1+M7frA8mITnNE1GyQUeUCEP+66saBiLMMtOsW5U4r1qCaCFxSMgoRyQJEQOgs3by9eXx7TI97NIoYmESKARgzUAAhuZnvtmfhg+9/NH315uawH9LusB+2IRUQRVMAs9pnAqKgxfPisEQ1xRQxKAL6qAkllQpbUhNTNDqdutPJRD+Q9c5xYQNT1Rg6QVnnGMyMiEG1zaD3wKZuk/PmEuECZPJtYDQiJmeM4FaUAKdeIAYWFNE85TyKIY7H6dtvnr+brkKgaRxGG0Ki2FMnXe1+9ECUSa0qYlUtenJQYt/FGAN3XjAcx9FjQqscyqcJEFWdecwuC3nGd6rKzZ9yXENVSUwOVmuLgwTBR7dpIWJvMS2mzvoVt1vCQIC2sIxWOfTtcCfHxAWJDLoU1tuk9W5pKlaRKhRC4BiCVhsZAEBrhSQVJ+owIdPhsLcuyTTM4zGPw7briXCYQ9d1x2FQVQGb8swxAuGsJqbHOc/TOOQi4JQSBibiTVpmldraiIAJgAgJqDi0DFlJtQj5CFRVAjJnKNVTTz+CmSxCBWigS2YNllNf/1fxSgDuZzfr4sngnLMUxYgAtt/vS54uL88/+ugjfPrEjJg57LZhJkTMVGKMgEpOdLYaQalOvEkEAJeXj1+8eIWIzrB+e3UdQAlK7vub/V022J1dIBMCGWlxqJKBAabNdh6Oqp5PBREpBiFCKebVs0Xw1ZmVdDkuRERop1QkoA8IBAJDKgbTnI/DOJWQlTBuAtF0GO6O8zjPfd+/tzt//PT9vt/2fQKAPM3v3twMw/DNN9+IKQA5thkAnBjMM9cOBRTVJVGyat+nNYSeVxnKe0nfk3e1MNyqainLaVDy+Sou2iKWUU5/bkjkjT9eSRg8wmCk7bbfbrc5Z9N5fzfPQ1EpyNWB83Ygr05DTUiRFmedFWOldRoOEICmeR5t/rtf/vrf/+mff/b5V0hnAuk4iU9JYO89J5+d4EQpzu5EbNGVJnvphkvrY1EvP4J5q2DDUzgLIwIQoPyz2ckHRmol9qfffvea5lWcLnav4vRPjakPja2IvZnRGKDMGRFjDCKy2Wx+/JM/9I4pZo5IMhEzR2Y09ajL++ddEtyhLKWIFFVNHHiZDaBL91cxbX4bLUinGOMwDM0pAQBEt48EWYmIAwUfwLeADN3/UDEysGpMAyJ2FBxTioSBAjM2BkJmDiEBkiqoAQMaYdd1+/1+mqZhGDDwy5cvSyl//Md//I+ff3ZzcydFEDmEEKNWNGN54JA9jNubq1r3ejUjZ701a4fsvgCcGDjWH05EoKf849rpXP66Nne460dEBmAmqihEEWvwiVBANUs2VQUuCikaBnxzdfu///XfvPfe2b/6kx9FIjObpilGNiuiWQSMoggch7wf81iIOjZFy4UQANSJwkxk6ZvQjg0IvH4OZiAFABWcxlwBNaCogIq0ytIDH2ntJbdzsX5wXLBXurCar8+CW5C2Yma2Gt7Y/mp2A9FSEi2Z5ZeVUsZxHsfxxYsX+7vD559//uLFCwDwTMpmUylWLnZn5+fnfd977zdSAoDz8/OU0nHYH4/7wwHGeVItpWf20NVCSkmKGUK/2fTbbYw16eB364IUyhmbokxcYob5dsi7ntO2G2UqEDQwUmDYoCYoBEJluJ7GzJVRMxARY+WsRuBG2brZbM52F5vNBrD29JY6BNgQz5xRZrvdFu/QNvPILeecNbtlvLq66rru8ePHfd+37Xsg/6radR0shriFKG1TRGSeZz/R31Vf3nIObmuQtYirYl354uuqcvtYWkgBcUnAtftBE11YplWVjFENTE3VIgIIeD67zrDSEDpVf2pRhXkep4m2fXQzTYxgaEiOctX72pvI7xs9HQktk7pSCOunbvcJS45MV0MIl4jmXuPf+owsb56sHtT5kPc0Ult5F4+WnvMbCONwJKK+6xFtPB5FLaVwtt3M88zkk+osIEEkr2XPdTbUgqlDIG0PZsxBAYoIMYcQQKTb7cwwZxGyEFLXbTmgSI4aRSBnK0UQhQMTsyky4zSN3rUYUx3Q1HiHVbWWChbN0E6yB15QaaAWbMbiTNh9Lqm2B+0HX5SmZRaRo4jRz/xaE9nS9uD/ZGYjhKpHqG1G28hmDFomAxuHlRoAMNTU4N2NHQ7DbpY8jxatB0A1QgyBJ7ZiZT8dtrvHH3zy4avX+7wvYHQYh+1lD+AZUgRUUwU1YmYL2e71+5ohERqbgdtFZKwj0UTEFG3hn20Shsvg6UmKe1o+26ehBZzQaW0UGTmLwn2hN3PmvVYevDeyxldpkd36F77jxUqBAlI0YylaigrSu7fX79696550/RlP7/Y6yWbT3fLstIjAhnwAAQAASURBVKIOBzWzKDF0HGOcc3bV71gRAZvneZ7nFDdNe87Lq2VQWprAzJY5b8tz3fcDWoZYVQ1JKyl/8Rn3xQxFAAzUEFAXnsPiGYVSmAdUm6aJZokxxi6t1x8RQyBCJPKJFwEDBkqISHjKuNco3W0qKjvPfGAWjjGGEFPXDcNUtWT0OAN8vksXeTjswZMvuZjZ1dU7VT0MQVXFCjNPJRtC6vu7wx4iE0dBmIvMJUttijPzOcR+UD0Zjw4qssABADRnQ4+ueSHeUEZGRORVwOEqErEUqfp5Zc6XY3g6g+s3m+DhwgGQc3b01zzPIjNR9+WXX+92u2b7U0oq2bsNEZEpCi2DARfPgMl8MJeqbrdbX/aOWa1Mw7gn2O86b7dQYqQwFu26wByRlaTSOfRkOWf0ITSM6hMmAqkakvMa+o6YgQIar4Zre1jh50JEDFCQVEHINOtxGG/2h1nOxlmyFEO6OxwPw9T3/dP3P/jo40+ePn40lwknnIbRVF+8eHH97t08z5vdmYiAWdd145zHcdru+mmeERFQ3blHsBgjAOFCgUNQx9kQEWKcpmHJ+5w8uRCCrY/2qRFUQwgORXNqKlU1rIMKataJiQGranWq3mkSzCLW9/02BKIgIo8fPzbdaz6olRCwAa6Y0U8kIsYYCUmgelqO8eGlKuONKGhoan/3d3//N3/36+32scL23bvrlLaTZNFsEIBMpNYTQggesyCwk2OgOAEHIJ9GzIk0NM2J+UNV13PwHliitTDjKoRov1qd
j3vXNP1DdM8AiRUwMjC6Ryrj7mbVWLUGpWZoUgoT9X1f5mm73X7yySc3Nzc3Nzc5Zw5RRDCGGCOadV0XAl1enPkkp4aU8QM1jiP3G6/bNPcLyWK3acx+sLQVLWMnHgY51RYjVsj9AsclqkVkrTES0QJC2aTei66BMHUxRvbBrSklDMwcRW2eS156gZj5xYsXNzc3x2nssX/+/Pnz588/+cGn235ze7svpcTIVHnt3Qe9FxDqfUDj2or5r/6pwGZdMWgvAB9egIhoeGrvqe9ozZXQGhICbRNP+X7RCs9u3vDpexmLzPM8M21CMueDEJWLc/rHzz//6psf/vTH3+/6ZHme83yxOzN0chEEBDWcpeSiBsGAik/5RAjESxlzgbTVpyYARDBEFslIoODjWIzUqQutOaNr+X/gBz9YtwenYOU2LFcu79tSGqqxyuKpuj/zwF7gkrxuEANEzDnf3d3d3Nx8/vnnb9+8e/36NbMTtGhK/QcffOCew7bru65DRBMlhpS6nPPZ2dnF5dk0ne33t3d3d8NwyDnflDGwf1HgvnOXaHO+7fqeQwoxtvDYXzFv0QobRyXId8d5HOZo59ucs1EMXei5V+2PE5VSJtHXz57t98dHjx6dnZ357icOISQA8NlvvuCbzWa73Xrqs8G2RYiZu67zNkhcpuCqnhDIh/HgLrcfN0cZhBByzg7Q42Vok9amvlMvaFNKsMT8bZ1b+aGVFlTVq2Eu+w880iV2kjz7RzlODSusEGsigDkwVeQCIiJQKWomeZZShOt4m2gmaoZaU4hN9mjBIJjOHErONo4yjjRvLCQCVEe3LxEBqDeLLz7zgn1bKIBdxd1HCuD9V1u3lkt9IKIn+1hX6YRQWC47yc9aHckyOROWSGR9ZNplYZc2AGBZDawLPaZ6msxQjUPsYkJfp6Iyilodp0e40Fc2jUxmlg0RO0ygYJORobICQuwDiZgpBnH+rs327Hg8RowXmw0zz/OsVmKM8zyGhZIsxuhcVQToZCGbDZWszerM07HvNmoqIGTA5qlKUG11GzBVh9cDIRiQiZk5XITIFKoli5tunudcSjUtMYSAFHgXlvzHfWe0GeC60+ADLUgIp2kkor7vnXzVUyBYo0GrRS9EMyxFZ3l3dv64DGITnMXLPLy7/ctXH6VHT75/fltu8pM0zJMJPtqcxf38RPD1oxt7cvHim7vw448++5tvhqvrH5190F9wmO8eU4hcZh2yCoSUrZuMyBKoAjnL04xSAC0wqqoQAEAxAEITUCQFkDIj1HjGj7cTq4paRymGICL5OIM3QHOCfAxmaOJDWRxQiwDEwdRMClWWfUFniE0VVBYIEmNkTqGqrSqahmBkBECMROnQD9P+7P1eO3033ORkYduXq7G/A3ylHzx5/PbqG+riMQn//OLbu6/xOUMyZLUspZScS0ddt9l2u36cp8HHIXIgMI+0c3HqS4iJu34LsC25Fusc6mZmDiglCF3fz5B5SaqvjU09hJU4DSeTokUZOfaas4gUI2ZkNgMUgIBB2dQkAKUumNIwTABws3/d930HPXPMKm6vAaDvOkQMSImpC0yIIEVElN2RIllI293f5JBEJLu+APJGRBFpsOpFZ50y6F4mFRHXYvOsOecCGxEBCMgM0IsVK8j8mJBzLsX5QrDLKrUUYEtheXE4EdEAiqrH0g65Cs5+YzBkcRYIrFkbYnQFV/I8VXtATsaIZgbLgL4HDoFZTfquNR0iBqeaSAERpWjgyMzTXIZx/vqbZ2cX5x9//LESX93tiShuz0u+EZEsZZynEGib+sPh4LnJac4qWQWMQjR7vDs/Ho950yF1mXDQ+Pq2nKlyf4Gqh8NRsg43d4Q+fAkdPDlvRowAmDBLr7QJfVCVY+5TAhQlmcmAASPnQAXzo3gBACI5IDIZopqKAacuXg1DoV7S7s2hvLy5HYoZb2U/mIhM8/54PBwO20DvP3300dPHuy5Mw0GxTo69ur5+e313dxhDvz0Wx/rqWCTGuNlspGiKUURKMVMCQ2JGCKbUpYVRXRUEkRAMTO1sc+55ehEg4rT4BEOelqnHYmDEpqpzKUUzESkzETg1kao6aY2qV7/VjFsaIIQwHQeN0ac57493vr8fvv9BICaar6/v7gZ7H3fHMeWZGAvBkPyolT7GDiNbukp8awAAZ4WevD7iE80Xl+HdVb66lv/Pv//rP/uLW4ufHFUnPeZu2Ou0gwvmAAYg2FFnZlBgiYoNUdSKmBEvc2iXOKdNYEJsLlEDEZ2Y1vF+FNTMf5PttTfgiYGTg4sesmtZ1dwIEAFNDdXALGJQq5BdH+LCaIEIMW9Sv93ETUrEQM6MXaQQhRAU07u7/c8//aP3P/zxf/nP/2l/N4cQONhmG1DGFMOu3yam7aZTFSKLqAyFtXA0rNO5WBGLGSFuOo5obKqSh5m6GN3jVFXxNUHrUy0nTtMEZqFLNUokYwYOlhLFyNH5ldRS7OZ5VsmEwGCkJWLou9htC4APPsAQKCVmJFLtgiAqqMbQh7S7Ps5DsZi2N/sMvH13fZjLjBQQ7bPPfveHP/rBz/7lT589/6rrg8isKrPOFGl3sb0rE2JpTptH2SLmWIHF0EOtbhi0fqSmqWjxlWgpRsEKZYpIJiAq7uqZGZHWSpdni9TyirMA+IQldkvgLp4ulQEzyFlKCw5tq1qIVKkM0zV4NI7IQ7dJF3/7y7dn58/+x//hv3t0ttnfXSNvdpYpBpmmLDwOZRwhhjO0UIS1gKgCqGBBE0QMhJC7+Th502gue0SMXVBVAz/f1oUoInmYso1EhAHH0edSOq1mNmPXJF568qV2J83x9swMYOM4egd4q+811BIiciAyLKWkGFVbjMeBoZSS5+JMNggJgaQQUyAMIlJGU0W0YKrDdHz9+vVXX3314sWLu7u7nHOfYghh06XHjx/XkmAI3vhHFchTScv7bfUbY8KLy7jdXR4Oh8PhIDfXAUOFaqfEzISMiH3sAQAFyep4cVcFm+QVK9RCFneFz99SN47brnufl0HzRXQcDjcv3r58+fJYynb7COMuW0ycjN2RJTMrOotZ7FKMse/7rnddWrG+m8226/ppmrx1cAlIYkr3ytFn51sXtr7vHz16dHG2jYzTcQghhJoxAVQxFRXxIpwHIbIMiCcKbgpFRtdqplRyzaN1HEr2Cd7KREigWkouBObdM+KthIEDJhGDMpkB0IkuvmgWK2aWUkIGsVJyrqpRwcCIacz743jYbDazhnmcYoxE3Zi1+UUq4L3xlG8wi2WceTvRZur7qWwOGbnLanc4D4EwYojag0VQom1xsEPNXORCRAyg5qT1CnUoChAYkelCiW9L3aueZdFKzk8nNlFTY4qES2YToBVWm4YxkwWUCoiAqGuW0VK01Q/8AUu51+cViAJA+6xK72NmreLcEjVsrKzYNR4RQCSAU81x/bkejKmagqM16oxRXNCG3tKTUjo733rmvs0n9VDQ4dQVm15WYTdFr12LCBJ5MSeEQEuctr6NdQjnKjoGVlWxhcqfa5pwGAZw6gLHQ4elb2HRy+0DT7H
+6rUsEnpCf9ndWp1bHTB48GlEpAhEbKQAwBRLLi+evfyXf/gz6/VuuAPEgOF4PPbcMdIjfvrjD3+cu/GNPONPHl/95vVr5bMQfvjk8UwKKDMUBUVzog/gqC4iXgg3RYPFtvhtq4F5j64Rw3Z7LiLzlF1TtC1zQW3y2hI5y4IjrAZiImIbctp0dLVP3mVHHEJw2kxm8iaHtix1fJSjIgFSFwFkHOcs0jim7u7urq+vL6bAMSqJKq5bsSseTVXViAh57kMgcjoN7y4vDEhLedlfy2NiCOHJkye6kJK5cDoHiYtlSskxqNM0jePY5rHCKou/TswgIi4NnEgWcKFlN08E+1BBVwRlymW2IXChwBBCDB0ATKVEBmIUwLEYWCEDACoiVMtKIOZlWxCwnqwoiKgY+lhOERExtwHOJaRACCzGKgYiCoQcEUjawsVYhqKqhhiZkYmNjRCZSm0Q8863e3DitTu7PiNNZtrPrnPsfqGvnYsH+qR9pr/WP7d1fnDEbJnDYwAA0hoLgVDUnr96aYQK8L3vfW97fgYAm80mHw6O21HzalUhopDYh1uaGQA6EtjJA7DqVlvkBJFOtGlo4GMhPbBBxP1+v9tst9sNzgJzySoBQ7cJOc9ISl6HZguBKSARackUPS4CVa/jgCnIXDgkC92xiGvOXGwcpnI3IuIwTcfjEQAuHz967733zs/PnUO1mB4Oh3fv3l1dX3u7v4tuWzddXj7fyB+ZlmYnRBSrFgw8HrJFs/rczgpnrfA1cwu9EOdafYa6Qb5lRLSaWeOfb2JKWj+qqin/65IVIegJYTEMAzNfXFwA6C28EajoRMmTpwIJAJgMQUVzzog8zzLPhSl2m42Yvbu+ffV6/7/+r3/+y1999fbt2wKMDAKChCEkkROLyFq6quguZH0cIy/zlBHXde6T8MN34r210ljL//pXzej4BcXcpzD0ajGYgvc3kgIYeNFmQccgNhJKozoLNjCGUJvhVTXnzIaRl7YIKwoqkneb/ic/+YmqPn/+vFIuW0ZkDKBAxJxS5JAQBMlAzDmQiCiGoAqmdTwJmREaE7AZAkau86mbAVJVQGhFCVhAN+AZ6wr1pBgDM1dIHvoAJWRmhgoidfRH17GqSlFEjjGmFIOhSgGAGAJxFOCpktsxBp720/Pnz2/v7jbbXlWnPL59+/bm+u6HP/xh3/fHYSJKRLbdbm/mu1JO0tu2aa2R1r6Bv+nlyuYetFdjXFzLgDXTdb9s1fJc7RsfyIl9h6q+3c/6SvOK8WJkAcDPGmYEo6urq6++gk+//+H1H32aIouoAQGxSPEWqTGXacrU9czxeMxZlvlViARKRI4NMTO3mB7wPFgrV+y8EJnA0vrUVsZd95b1O3lWzEQnppn2XLqCetL92mn7zFoYAVo2RZeUdwUN4cJyNA6j8wDd3d09f/HtN9988+rVq7u7O/cMN5tH2+12t9ttt9vmIXtE+mAfaXFUeOl1rD5tl07TvGLsUu8H3M+ja1qskZ74X6kIIodNyvMo8+T6LYQACtM03d7u3727vrq+vb3dD8MQdxV66oW+1pFYKe5UmdnLg34BMzeEl646Nk9VtdULANQqwiildHZ25osgbLBCJegKguhjC5pvhqfePGxO1wItARHRVQ+OqnqXqK3okcxsMfX1S9fCgFhTM00Drx08WJBcnq1r0theTUs3KRrH0QzRTtKYVUoBRULwP1dz4n0VNHBkbasAtVPpCYhm8nBxnolxfXvtJj3kaVvQrin3uWHWf/vgzteqY61/2rcvZ83appdSwtKMa/UDoflVuqKxMUJywwcsIuLk1+zDslDV1BlHnCbeP0tNDVDEgwEGcLYSgFX/QC7Tfm+bzcZlYj0pkoiIgtesc86SSyklxb5C/kTMLMZY5tLkCVaQlUYfDACAoAt8tyzjj+rCeRuPat/3zBy75FqsLBPoaIX6aBKm93vi72k0qnN15nkGqBDqJrKONLynzRGLSGDmmAhwkzZ73b/69vX2H79870ePjSxuUmSaj4OxTfPwePPppxd/YHF//dsvPvjeJQ/5l3/9zXAsBvz0PDw9Q05R8wiSUamnUGgGM1AToXqCkBnZgNhUrAAIgCJpWCi8SylIwIFEsE1N8JwNwMlNXzvxyxPhgiU+IW/bk/qmi2HzLwFARWbVYvNut2uXLZkIKaUYlb7vFA7jPBUFMQYLRDyN5d3bmw/Gi67vMs7GdLZL211anRM0M1PLORv6QxMHNpMipWQxH/S7QK6ljTGFRpaFzO5kdJvN5ng8Ho9HIFMrc1YpFbQAoN7UB+jPjk1YEFGKVJY2JAQfkFCZ6NGUAZEUkYix7/oupmm2Kc/jYSx6RA4cAlEwwoAUY0xgk9cktAQkZi6ojdV6LlrZ2gwmpSILuxqcTODdON8T1+WlVv3Isrz8mvE4+VzHGGPoEjMjAxmJaa4oBQQ4KR37ff4uLJYAVla8GcsH2rCZ8LWma79dS91a1681HdxPBv3eVyklhPT69WsfP/D9H3ziGsBdVVk64qBNznSlrJWjLISwOz+/3e+bQi7qc7TMA8Jq3bE1D5OLvWYAIKZIKYiozGXCAoaAxmDkU0mIAwI666oUIMOAAGyEiAkJAfDuMIZtj9xNx+NxmtUMgHKepnl2RZpz3mw2nskGQk+6H8bh+vr63bt3h+MRnd8vl2Zgml4tMnuCEwAak96ytmUNamr2Zt2rgKsYxtn/mmyYIpLFGF3AFrNdid1Vleshqlw+D4yfFADI/l2u4YcypL57tHmcUlCbQ0hqQDHkEVQBTIlrjVpMZykGxEGK4OE4G0YK/eeff/sf/sN/+/P/+su7O8kaKYVScoFCEfkhMA8eCJXvafMI3eTDfUd8Lbe/99O+K/y/9/jUJb3vPay169o5aMdB0RQdJGtO/sgcQqg1ihgjoWL1aRbEF1Eu5ezs7Cc//sPj4e7582fMmCjOU3Eu6VwkixoHAQyAKODnAtXIANGIIIRATETICAEDo7EZE2QAIv+fm8plc7FCv0JI1cIiuJVgrvxt5MxlRMTYJksx+DTF2PUxdSGFUIoAFUJOIXYxMYLNNTsIqGKiCkjEyCJyczd+8dWX+/1+d7YlIhF5/frtF19/9W/+zb/54MPvffnl14goYDEEWygZ2t7Z/djvwb78U1v5e/f9pNDo5Je3T2g+dLOqa2n5rkyub2YtVEuOB51MGwjBW+nUbm5umOzd9c1f/tXfXJxv/s///b++OOvnUiRyFsxmRW0Y5zlL1zMgzCUXJR/WyoACwkjG6BwK4zju9/uLiwtX+41q9fcuhS7tFZ6eiDE2XEC7oD1Ig4E0teMqyxGba/2Pi7vfghPjyoLbbkZWHOPN6h2Px5ubm+fPn3/zzTdv3r6a5xkRd7vd2dnZ48ePz8/P/Lw3Y8ErFjpaoSLbMWyRYYyx320Ph8MwDAgUQ/LbVtXFDz8lSvyvQgizZACLMcVAhSkGDiG8evV6PA53d3dXVzdX17fH42iGMcZNOL1o8eB1Ic1m5r7vt9vtdrv1NqjmJzdt/yBgaI9Q5QoSLuPWuq5rYd4DU7
78OWod7tPyv9aQ8+vj04RcTzh5WJIpvlPLrFEAgHssKVSp7219274CuIpy/a58l2uf/IrvVFevtXyqKlEIKTKjmY7juN9nRrt8dO486T5hDUEACYzWPsmDWBRWSn6V765Y3PU6mBnovRBu9Tn3FM5adeB3rNUDLdTKNk0qdOnSNLPKxXg4Hpe/Vk/O+jrwgtkVEVBz6CYAFD6N9CWi1ih/XxVy+0qSLnM14apqVjeemUXzdJyOx+M8z7vdRlVLmf1DnL9edW5tCb61XrDyHfUUCxCuvve0dm29kAkBVNuKr6wvk4e5EfHi4gKgEoc0ilQiIg7fEeh/ztFcy7q3ySFijFEqquc79ROmYRp77PrQgSJDCJbGcf72s2cXH11evvd4xsEki8gwHwj44njxuFxONv78Jz9+9tXXmx9/7+3rw+cv3hxm+cOPnv6ku3y660BF5iko9SEJZPB+V2MzJAqETBhUtSpJhoRWzMTEzIbD0Rc5hoAJTMUMmCjXB1czh0tXMDQxLaLmboFr8JN3uG6UAFDGWh5MHFxI15y8S7LIcwruQ0MIlJ2/kNKcSy6GEM307vrueDv0T3qCbETb7eb8on9w8KySWJR5nkOKyNWiE9WtaD0JsrwIDRHneV5UKnp20JVgUfE+w2lJ4bSEX1Og6+MA6lhZ/2cdDWsApUBEsMpxqAEppbDpO4iYj4d8LPvjMJcjEBqyD3iPzgEGoEVAlBgYqdHcy3KypJIzcalzHrwT+BQ+nWTvdPDr4mMtGsxa65aAxi782bRDSCk5YW32zEtlBjopKZGHVr85MWudLksb7Upb2VqnPzAtcF/TPdCwsJCJPXhfVZci072suZmSc4pO04sXL7wF4sOPPvjwww+BuEgeplGLxcQAEYBKKcgBqAbAItp13aNHj+7u7o7zSI4cA8cPE+HJHSFmEKxGxcWJdqJ0nOaEHFNHIUAuU8l9WOY0E7BPzjZFMWIObIgmpqaEjGYkGDBRhjjM+W6YDuM0FZyBfJqwi0HXdZePH52fnyPi8Xg0s9vD/vr6er/f56UlO+fc5kS11WsGElat1ADgnX5ezWsZ0PXGtSOgq6Ddp7c1/amqaNSiTVjB762iTtZUNARAbsMaTtKMiE/5uFFLSDGluN2dX4yPYlAAMCBxngADH/HSZCAbIgcISTDe3Ay/+s3nf/kXf/uf/vN/LSUCdV3XK9BcJkCfJ3GvPLg2rr4srq78kRueAhsLyH13f62XmkDqd9Jqa0H9PWL/nbgR76dCHh6ZJQepAAxg4FOXyX2mPkYkAxUkIzO1whhiCvkwvff+k/Pz81/84u+naUopdJsOAJAZAI/TTESp3xrAQvoDNXRDMAtmSATEkZmZAKEQGCEjYIepBvMrIID/ec4Z6OTroHurUntUGNDAEfDEyGpKRIxGRCFw18U+dTFEUCNA5zNDDxeJMMacp1KKzKVYmDCE0CuEwzi/fv3mzZt3MbIBIoWzs4up5C+//PrnP//vfvzjHz979mIqwszePN/3PVHG1astuN7PGjeBWXl+9c21x/lAMOw+H2BbiuaBrHMuD76rfeN699vatgNeTJGQgJzfsH4+2HGaH5/vispnX3zdpYCI/+KPf/LpD78/EQlygXicy5QVQyAOhymvbZyoqhWn1iDunCLI2/jjwkEI940OrtxZXTovVs5eHVX/4DK8Ny2mBsYPrmzP7sdznmt7CwAAVZFr1meoA4SKJ+5FZB7GV69effnlly9fvhyGIcRweXm52WzOzs68Nth1XVNTS93iHl9uk94He+FijxxUDOzUYrrEVDWgaorOP3waj6oagqtWLgD7/f7u9vr169c3Nzc3N3fjOKoAInfdxtku1lCpFg2CE+nF6LXNFr81K6xL0sEfZA1fvB8Q3uPbr06Fom+0rurY/mrg8JWyCut9we+gSOq6eaFJHXRSI5Em/L6JIkIUXb21Q+Z/7iDetTJsPzt+eL0CD05oSzqAs0qDgxVQROZZxxFj0JJ3GDFQQACQNgzXKtiLquUCMERuK70MRiK/KWY2y2t5btLCK3rL+9bhXrNMk+0Hy74WSFuaidY6oT21raJlMwvDMNS3QbyWQODmA5gRvXIiq53jkxNgK+PUtBwiIpYmjiSoqvNUvL87xujwKlUNIWw2G9XiA7adzINTVFWG00nznQshWIEW9/vFpZQQYhNlWo2CAAAncFuaPg3cFmLFuBpCIKxNsUsa2+4H8cwsRVpyBX4fD/JaMwIAL+mKGCMA+rl6wKF0b8dCHPcjMvW8MQPNGLnrgxyvhrdfv9uefcIxCGoX+36bfvLjP/yfNv/zR4+fPHs9fO8nP3v2xdfXw92/+B9+9l//099+9e71HIolm987exRSAAVCpILsgRqqCUBAcOp09LbHSBERyIy0ZPWwObTH99U2sxC4FWb9rmtrZ5054TMMHSHm7P+g6tQhVSIIjdAIiUNMKfUxhRDITxAAoqlIa2ZTVV0ASJwAyErREDco5XA8jLMasCnfXO1vrw9Pck89AxqSxoQeF4GdxjLZwpEF5PNCgJmDY43QdJVzXRsbvxMzKyW7e01Em80m9Z3DRMucZSFNXmzSw6wEVAwDIhqhAaCaEigaESygaxFFE8loQEQYYog9pcmmOWcrWQEVmHPJANn78sjA+TmwEozXOWNm/theGVAfWmoLUHJRK/eAne0gMyMujJpaqfsAFBIBp9hQKBj8FBQRUXDS/cbk9BBH3b7iwX/Xr6a1H7gvzcF6YFlXiYZ7FZK1RlpvJZ5ynM27MqeBHsd5nqeU0vX19d/93d9dX//g4vzSzFLq+z77TIJhGCq9q6LpAkwViap93+92O+NaIFWTosIFI0ZiPPFgiXu5S4BqOM5lNuhj2HWpTx3HiBKsFCBDQiIL3hZlwAApMcVAhGomBmYkRpNZd/bozd3wbr9/c3O8uTsOs2azYZqxgDuvFxcXT548SX03ztPhcLg97A+Hw93d3ZxzO9eqyhyb91AXnIw8Bl5NZ24VvK6LzRNam4D1bjbvAarxu+fvInrRiohrmsAVrPsKDz7QzNx4VY7reqsoICqgqgGDH+1AtNltIzqlExigAIJaMS2qztbDzAUgT7Ni5LT74puXv/nNb/7xsy+LgBGZT5RUNUPmyGyleHblntC6+LnDsaiI0hyaB+Z5HSesH+3+A9775CbesIKWtIvxfjzZfrs+O+tTYJUKq4qgwyzjMk0eABDNUH0qLBozsqkQ4Z/80R+Z6a9+9SteQE1ASJRAy1QmnMo2C4cOjIiRkNkIAquKh0yKBHU4BIABmzAKYeC0aY8jIro023klGRacVIu3A0ErxfjDel4sZ0mhKvQUYpe8IwtJhYkDBjNDlTJnCkxoBEhgipRFZlHi3ohLmV69fjvNc4i7w2HgFJ8+fZLzdHV9+823z3/84z/6b3/5N4c373a78/3dMfWdTb8/GoQFirZ26ayWXB4iiR5IwvpNM/suL3r7KLgf8tkqfdMu+723t/7z6uSAomTzehQRAYXE45QDG5l9/c3z//T//bO7u8PZ7pKfBoF+zPu7Y86Kq
dsZ0vF4mLNJJfLzoboqaGRo5H1i5I33zj/n7lC7T1xqFESEhLJwesNScmeODdSHS47JFhiCrlBztgAUW7WAloTjOgG6bM09fFPXdSK23+/9HRG5vb39+osvX79+/fr161LKdrt99OjR5aPzzWbT8CMtOOKKom+5mHqrrjYfQC5djxFx6zrxsCRncQZEovX4KL/ezCznabfb9X2SPN/ubw+3N1dXb/d3NzHG/X4/zzMYhUAhpH6z8WDP13ytVfyOU0qOFHXlvygrC4EBfBwfrfTVCfUKAIiwUDI3wt7Qyol4mqJ+Ejw3tXQfk+kKp5kJd/lUtJFL+W46UHHJgyxpC1VvfPDFd6RriAFOsPj2UCf20XV6xW+yBYSOPXThCavRI+7X+Ylm0xiNMFBEAyymaqxI++Ow6yV0XtJwtWwI98DMa93echC2pIRcfkQIzDw1492kPtrNmTtMTc0W/w3MgOjeqW9rvg4x4DtmhSis/wQqGacBnHrx/CaDnAC5AuDDiD2OUkMfG20EHkD4pL+EiF4XUj2pNl5i33YAVNXIRMxBWQAAENvhZ2YOGEJQLV3XpRSIIOd+LoDkY9m6yOyGFgCk1JSPW2Iz8xxD3ULTyvq9CKCjjxWMvQ5jpggBccpTCCH13WkG6NL7YQgOC66rs7D5rRVQC+W/mxGsq89kKoBIgUU0SwEAQ/DmUVtVw+qfExpCEZulkNRhjCn0x2H85nfPN+fb93/wqNumOWOf4icf/+D7sM37/fvbp1d37372J//92eXXX339/NOff/Kbv//s5f5q/Md5f/vhjz949HTTh6SSRAQr+TMQmFfMROvEZ0M0Qh/zQsFAkfo+5YyllJyn9oyqSuSI33tSiIhSC2VVdSOjQy4da+pCX8fNMxNRdBRmjITovfMIjUa4TqFUVUBFjw9NCuQiYsjHedofZzVCCARyPEx37/Z5erzdbjIUAEnB9iIqgqpkRMiI4hi/cRzFVMGYOVDN1aiAwslsNG0ORk2GtXKxaONWZebdbkc7NDPHxiDi8Xj0+Qhr90VVU6ymjpkAIDL4DVT6OzCzoqIiUmQuEomTa/c4CxeYywxIQJy2m5UoEmKFqIMUwDqj4sQrQeRdXIZetq1iBw5VPQHEDXywA4Ax6+JSAGtzOLLmkLq03fi0LjPLOc9zdmR/jd6xpgbN1uWde7qpGYy14/LAv9FV7nydU7RVGq/d2IPP59WYkHYNIpr+nkO6iPQCOxymnCcASCm9//jxe0+e7Ha7GOM0DT59mAMXmfp+OwyDK8xxmojCZrc1tromefK9JCEA9tSKc+ND1R6qCqAGBoo4i8o4joQd04aJAzIAECAjE3YBAxEjAimj12oZY4K4ESMpcjvZ65v9q6vb/Tgf55wL6Ar7kFLanZ/FLh2Px/1+vx+O19fXqqp2omtzUfGpFqclxVqIaCAWM8m5jYkwj5Obv742om0rmxtnNa17Yri8F0la7SOiBdnFzGU+DWKweyOSyD2J9UabYuiCgs3zTH3X9dvIRoExJsMIWBQKCORSCIpgAMRiZNztx/LFNy+fPfvqs8++vLmxs7M+z1S8FYKQqBJplaKMp16O9dM1C9qQZu4kNWXSHqFJ+z0zcT9B2z68/dX6+n/q5/UX6apvB+7FCWigSIiAhB4YETD1287zO1JyyTMDhEiI1vfd4XA4P9v95I9+tL+9/uqrL/xhiwJxRCQxU6JJbJglbUjEGBERkBjRGADZVDXEpN60SIZGqMaAgSkbtKprtQ5mZsZEzbKrKqk3khPhyTElcO2KPiuNiBiJCGIM3jBFiCRIFKByA4pJERDvZAkhxNDNQ56nGeaizIdpfnt9BRzGPJerm6K6OztLXZqm/M03zz790Y+fPH767u1V6jvbHwFoHMe13V9vov6+LhLX0A9cN1zKHScddT8+XKvE9s4/Ixu4CkHb0VvXau4JJACCjx1CtKU6ZAZgaoYGInC9H/K3z0PsHj9573/+H/8VEt8d8+1hFIhEnOcyTtNhyJ4xjjHSSsU2OLdP8vAhdSLyXYfVlvbR9iBtAU/+AHN73x9Nl3Ji+yes8iCy4g6tFaTaPQimWnmOHbFjwDGEFIHweDjuj4f9fv/mzZuvP/vCzLbbrU+Zd2R110fPofhdLZnrGgeuN2UtEk3Hti3w+3TEqYg1Fjcz8+A5pR4REUvLvPd92m57NXn79u2b1y/H8Wglm+Hd3R0AdV2nAqqATN7b0pZCVyVlP2t93/d935p1/UF04SeH+7DDlvBqT1eP4cIs3Tw6RPTB43CyEQ8zYvfFXhHRvanlbtUlJMao86wLBVHzHFSVGFxemmDQKvm1PjhrGXvwT1V1lke/Jc/mu2S2gqGjydo1MVbqriKCoIAwFe0mvL7ZgzFTCGhFNIIQEYASUyuf0gIVbqHsg6O61tjrI0BEtLT5PHg6bFXZ++nFtlYPHlmkhv1wX8OoPjRSVbbrbZEhMqCC+khf2Wx2RMSw0JUuZrvg7iQci1PqEfbqvmsVTlUPelf9oAoErcJHRIjgx3aaJk8DxBinPJ9udFGgYOSjnIsKzDMsxDOyMB9Qnbpj6lHLSjQQERCMkP0kQ991XYNQF3NMeSFiXYF+VUHUFBUdV4roLOc+UYQo1LYxAKiDdOvNysIfU0qRZfTCyorc09FmVlQpBiiYi3YUYuxQcxbYxLPhMLz75urR44sUOR+z5vLrf/iH7eHbDz789P2P/wDy9l/9yf/06U/e/T//3f+DOv1h/uD5765fPjvIeF2O4XuP6elleXQGKJ4uQhNAtACmJkUKhQ7MANBADIwAFQORufiuUMFgZjlPHlY1UYbFZQ9UOYvB1lhzppp4qHDzxhvBiyq1IqXMtij0s7Ozelq0+Bw//wsnrcsqYy7vrg/7YeZ0RmqROhgOV2/uDlfD5qJDImLY7tIBswsMETEwGpipqeZSKFSyaVAvfxOo2TKJBVpQ63S5C08MLdww0zSFEObhSN4OHhMzbzY9EfZ9F2NwEwQAC4usgVjkmpthxhQC4ikiJTNSESJV8VKpq6ckstDtMCnPaiBmJKqQ1RhIPVenpKZmhICoXqEFUENTFFQv0S45CGmJG3QQxArY6S3sCoAGS+YCl+QOEVAIyCxmJWdXbU7GC6RaG93WHtLvSY/BgpnBhf2laQ//lrVvtJaxBzrL7veTrN8PIbb9Wn9UmauLoHqP28BMY4yIaZoGEQkhHo/j3//93//gex8Pn4xd17333lPkqEAi5uah6wJMSBCYoDWWUEA35zSil+acpyqltN1sHSvuCWMiQgPJPtQbCU2kDDnPosJ8vu0BxUCBgZkChy4gI1lQ4mghKAaIG4mbeSz7sby+efvi7c3V3SHLggRHCpANLPXddrsNIeyH4/F4PBwOzihQRwFBzY75KbNy8lYNxKxu3JL9rWveDIYDxFqqvhm5xk7eXI2qIgibV2EGtHBuVRdEAJGYouOW5dQK0mxn3T73Y5iZCNb3U+lzzVhin6KBqBFSVEBARjZzIj7IhiBaFFPs4u1++sU//O7q+rVASL1OsyAEAcBTsk9B1MfZ
NZmh1ctWOPMmrrakz5p8frfE14TzQUqxuW6weGDf9XXa0Wji/d0D8uDciZlDxhVMRUktUw7OierkLsZS6swkRDMtIdCHH76fUvj1rz+f59k7/AEAKOTiPLJhzuXN7dGo2/XoDhNqYVIGIIYonFJUA0VgZjQ1tYDAgcpkuEY2Imp9FomBAqOD3hcEnYKpj1SoHihzWA54IHRNGxzx4XgiYAI0M67TYNFEiwkzi6kZF7WiwBCOs7x6d31ze6cGeco4FzEFwssnlymlZy9e3t3uP/nBD7/+9tnxMN7d3R2P493dXc5VSltRy1a+ftu7JjAPdrDt1HqPqpOzLMiD7YYlhb92kddysna+T2f592lURBRTpgDutYAhGAKZSlGNTKY2z6oBwgSv397+8tef/Z/++58j6s1+2g+l22zmqQzTpEjTNIVg5Kx7fs9qhjWR5OHEOI4eULlibDffnEP/53p2dkPrtXjDVlEu4gmO3h6NVkmituwPlmJ9MGExRq7l+r4/Ho+3t7dv3769urryG768vLy8vPR4iZlT7ABPym15Cv+oUxD14Ah/93giYgiphbruxDpZnariMnKEFmRcCAFRxnHc3968e/fmeDwCqM8/OR6PtVgEBsv4B4on/km630fXOGZaEOIrnHPF1q0zFE0jUUvQr5AR383b2qpau8adIlZE3nodvP6PSwsYL7Miq+uFCKiwAMTMBKBWy9otucj76jlH5voUtNte9qWeLZe4cZznuTCz9yrnLL7s7t/mXMZxHobJC1chMEWGtuOglmUckMmQqUtp0wUgRAUFVSxgBKsk6frg4ypmw+W1Flr/ua2erTTDfW1PtErgtgsaSdVaOcBSKW3B8/J1JFJWdnmpOiqGJ08fEVEg4uA57KIiqo7HQACo4yA9mQc4lG2TNrcTlX0xbdYHoMHB3d8yM8Q6MlhEmGmeZ09J+sVO8llK8eKmX1kFzogZvOm2lDJMIxogVmY/hwaywxfNYGlhYuaiQg50acnFEJ4+fYqIAlZUSz45MYbg6NC2nY4GEABDJGYgarT9hmjt5C//A0RAlFIaqFW1widW6bp7UT4iliwpRDDLRbaRI8cpcylDnkRZX371+vLpRaSnMXSbLr349vXm0+6nP/v+y1fz5cUHv/rl19un9G//7b/93/703/3hjz+QA4zv7t7elDJdv72WD9/XD99PH3TvMxMToSkHQzIAVcq6kgZTNGOEgAigJRBAIKqUXOwpE1FBU/IhwMtqq2oIaXFDEEEdGomg7BGKIYISGoKZmqgiAgggAiDEEACgthQGH8QpdmpyIUIUmw1VVY/TfHM3zNli6nHOkQAmuH17d/t2f/50wz0C2abriASIgIyQGBnU9+rERIeIsAxvISalUyZmbdRdgTBzTFXI5zzngqnbuA3TIl7q3CyvvFA2V9MrIiIBnBQLmNF70QMhM5c8I6JiNfbMTKH6ymhGoIGAAyZlAylgZiYuOdWQA4IZEPIJRelH2tQUdZnASUSoAHFRHKDZzADueSpIJ8pjW/WPMeNms+m6jpgdOgsAyBQpiQgaNz6bk+bSe/porRPWCmj9XWsr235LK0d8rUzX6bR2lOA+dKql2cxsKUdVLuZmcYnCOI7MnFIvzlhVCiL+9h9/NwzDbrdj5svLS+Ywz/PVze1ms9n0W0IGBiKaS0amTZcUBNA4EKB5lOiuKSM50Kg1PLtayBNXeEykDkylmEzFlFJw1UIExmiEQMzEfBaQQgEqygXoOJY3N4cX1/urw/HqMB2mImpGGCEi2jzPhCnFLnabojDdHQ+Hw5RnVSWO7sQY1cUV0ZwzYUBE5/yCJRrUOmYKHGTV5CGE4AphHf5BzUSeMFq0NOuLCMfQSPwAwP0JVd1selkSKEQEQLnknEtcoDvNnAMAgK7OJqr6/CU1s3EcQ0oAwDGYGYEgBBQVQ0L2MZsC1dLmUox6MRrmLGC3d7M7KCl2uaCoERmaFSuoxoFT6loXIa68KFyRWzR/1NPPKaW2CL/XKVz/8+QJLdCaeo3VOUb1pIDTXy2px/tI6Qef32zN8l8DcJiESS5g4oOYh2FKgbsUQwiEPYAx/v9p+9NeWZLsQBA7i7l7RNztrblV1pJVLFaRLJJFNtmc5tLshY2ZBtSSPoyAUaOhXyVAn/RNECBB0JeRgB6xNehptkg2m1vtCysr93zbfe++u0SEu5udc/ThmFlYxH2ZbAwwjsTLuBG+mB87+2ouYZfL5YMHD66vrz/88H3PCwBCdxuNaYqTmOE8yThvgPovrO56XaMCBUAmI2PogZkRPGBIBGAaAkLoGKnXxiRoDQAc2DUK1KzTxxTd5HMk6WpQwtRMvSKBGiTMs9hT0pKUwUSm4sVIm+042zRpAF5QvxivxqfnV+vtJIZTSl3XrbfT+pNH45xO7548v3j5/OLl66+9uRhWn3766bPnL3zOwSyhKG0EeWYGAEBu2lnS7TAXSxCgF+vl3P6SIWuWyyygaHloZka7jaubi8VHia/yl9U8yfYnbcIyBywXAAyBiFCyNqKqojZ03bRdR7RFN4RAN5sxXK8ngVk6Vb3Zztuo3Yo323mzncOwaNdWMVNVDamqi7V9Zc2fr4Kg0njU3Zhya442nFJ5O5Zu/vWVqaQvehfoIra4Pkj26pz34rEpJTNYrVbjOF5dXYUQTk5Ojl8/QcSu4zD0AGBg1IVuMcQYfcpd8pKD2jVg/3WqeHKe4DyzJcyceyCSZz8MpQIl7QbEu7vNT3h5+ezm5ubm8kpEjo9XInJz+fLq6urunTsish1ns7RYDEenJ8NiCR4LYaYQuOtCGaCtqsNiQUTgs0nKBwWgLnDfIRGa+qRCqsP3CB0hc0EQITB1XUhljAFUgxCk8sDWekTEYRi8VaSU2sgQgg++zwpSCDVnwPe9cDlBZPBZU1iSRbnWmOQEn5jm9nF+MHNZ5M528se5teKmck0/xNIRtIYHnbE7DjMDBOI85RsNSczirElIjDoDM59zZwDi0zWxOEmpOMGrhgONnrkDoL9wycpBRCk6aksFqpoTd285mKqV2GI4lFR2LN5JVWdRuzFFUIws9wWH06PlPM+qgkbEgIABkXnw4jBVBcoqu5mllBbkGdspbrfmoRJABg060S6RkjTOKLLs+2m12m7XvgciklJ+ja7rkAwR+96vynV3c9pl7sUYAUgge2M0R1qKF0Rz9M+5D6iJiJSxHmLqLWcQEQMf9b13N0+W5wJrE3gFQmmqWrlU2aaUsGg2WSYV5ablSloqbcxdmqWDoofXHDU95cD0cHidGYhIRyGEMKWYkiDi0C0GnQlpc7P+2fd+fv/OWX/aP/n46e//43/0G7/51gcf/TzO9wc6+8EPfvJH//XvPrt+3iFBSu989YvPPvzZ1RbCycmzm/Prp+srW43h5u233+qHfpyuAnqbtRlZ57Q5OjoK1G+3cxJg7gjCNMYQsIbvi6ctU0ddeUyTq4z90IGouzXNY3EeWAUMXSg2necM2zAMi8WKHBcRmbHjUH17MUZvPYfoAjQHK5BATbuhny/XN5tpOybV6fTkTOc
tDWSTPfn4+dvvvEEDBiSV6IvsKAQOIEAUmCFKCqEbx2lOcblcDl0PYCklzFlsjIgeReFSJOA1D44DROS12t4ew9lE0kzDVPxzQ9ctl0vf1hjjer1OKc1xZkYAm2cxs9VqgdyJJmY2ic7sDGyc524cu44X3Ok8BaKjob+5uUlxAqSUhL2ZBlIRvh7NQLD9WEEZXo7Alc06KwKvC0TJisjOk+QaGDScJwFA34fVarVY9HWWJhF7eBl8mEdulbNLok4pEXfV1wjFPGs9uG3uEBHVlJt6SWVeB3dwf601Drb6a+WzlSHqvjcdG0PXmxZ68YDtMs/ZdbW+Wzw7f3F9sxmneO/evZOTky509+8/PD8/n+ecGTgEcL2fiKKmcRwf3Lu3XC7Pz8/dETuO42KZa6VOTk7Oz889daofQsDh/Py8G0KM8e69s0effLxcrkjjmHR5tAymcxw7xGQ0JqOB4qzUGS9WaPjixc37j89frKOG7uJqjArYDaiKAGboPr/QdUA4zpOIbDabaZqYeRiGcRzNY7kC1fueUiICtaRpZ044tteiRyjpNEWmRj+hZhn5EcqMosoMIUc2ujyf0owol2fEGH3oBXudervR5PrTLjG18uSUZsQ8er5wYDZ0Lh3GcdYe+o5m0QV1YViO61mmuR/CsFhNUUy073skSqKaZLOdcoGy4eTBo75HZEAMFMxExDNHfMw9+3NdKvlQ3B3RNe6JivzViq6ga6U47iYXk7ML1xGdw5ycnLiHtJKG2Z4XvGJ1q2dXiqh/IiJYHf1MjKSWFCBK+uCjj157cE9NVouhD4EJzDSldHZ0tNlsXn/9dWb+u3ff67oOmRBZEeKckAMHfP78+cXFZd/3Sv388O44z6erBSOs11erBd85OxGRpCJGhjnoStyhipsHTqX+n5WmT06P3HUBaTJx9Oi6rmPUlPJsSkcqg9DlPMCMn4AByXNV0jQxc2DOpUdl4o6Z9cOwXsdRrD872SR796NHP/vwUzEQUw69AaomQLheb5DD9s704UeffPvb3z69c/eHP/7JNE0xRpVd+/u6sxU/rbHTqm2Q0txqgZVGuMl8btHDd69FLWi8XWHX7He36bgffHCqca3jAE9U1bu5Oscr7h4Db8SPPCz6eR6JiPo+iR2f3Dk6u/u97/7gahPP7tydRQ1pWC1TBGehzMyAZrZcLDrCm5ubbrly6lgsFtvt9ubmxnt0OyOqmjeX0VxJU6VxzH075q4biMijNFhcq1ByGqkk9VTvjJn5I1yhd2Ht96eyLy5rqm5W/BFqZkdHR6+//vpqtdpsNmhUfFsYAlXnPiKmpM79ag22I9WB/t1+sBLzqVJPd9YLFYWWV6uVt1G04lc6OTlZLpde5JbSvFot7t49i/P49OlTVTg9vUPcicIwDKvVMfe5Drzve29ftlwuPYLiUPL8zNqjoeKtqnLYq36SZjic44l7vaE4BM1SdTlVj1hlhgdmD3PnTe9g99bZreYGXg4FGbo8naZpKP7AsgsSQghd9gKEEBAtzs5jVUREoyftEBFCLWmZmVkVQsjVhlAS1L2Gsy4AS/VE1mxj9FonT4uIMfarXAQrIooYAqhhTDaTxdkkcTheLHuQaZvi1BFrkzruMPQb+r/VtKmf+35hxZ1aSnlcjiTnBpb7ghEReU9me5XhjSWbumJ7fbW6HlfhqpzKGr7kKGKW46Dz0CEAqwqZdQwALBpDF1BVERQRwFQTGjCC6WboFgBAkGKM40ZcSK9vppyg7F6uXkSESHY2KEDXMzOnFOdZF4uFNwTH7PdyVujGQA09s/eVMqy9/JuGRUxMWK18y07BnHTu8086V1w4u3aJaJ7G5qHmXnFIu2TIyj3raRVvKuVUE7/6xQu4yYsG6x0q4868X/MmYXXjGQGQjxYTMzUKSAbYh8HDntcvbz746Ye/8CvvfOXNd+6u7m0D/dl3//Lh3W987Sv3f/XXvrFaLvE5/eHv/JFq+rM/+evf/4N/8Jd/8dO/++G7X3vnK8+fz+cfr1/GcSvdm2/cO1oNSDLNNyDzoud+6LfzJDJCQqYFYAdKXcdJrpmr3Eqq4AplSilG9SEiNUM6hABZFAEgmBLk4BUcLxfqpYCYlf5AiJZKDRIHYiIgUECfFoqqKiZERIGJyAclI0lUm6NMUaPolCTJVoWXp8uTdLzdXq2fr3ULR/dPpnnLOdMsd55AKLQBNMeoYGScuhSIDRA0gVoIVHmNNa2DWoOk9R2oJgBSTabe7BPNLMl8dHTkfe6JPLLTh0Cr1WJ7fTXP8zRNblqPIwDa0PWBGBg1iSabY1K1KGKGAYwJKAqqDkwbBDUZmAQUDAl1F9xDd23tqQ55zUiWa2Bv/1ZSnVudIxcw528CYwhhMXRDH4AJlFQFqtfDbf8mM2TXexaA9udKWZP81qqqeytqjvbXAwW3PefgPnsUvd/Go9G0MpEaoiEgU40+lfMJANwZfH19fXNz8+LFy7Ozs9PT0+VyGbqBkZLM26yq62KxWPZZgzfEYbk8Ozvzp3sz8aS2QOz7/t69e6GjohNgN/SI5u03H77+2jRue1wNAbq+C6g4d6YqZEyYQj9bHGhYb6fHz68+eXZxOcqkNM02xqjUCZoR9hy6jgFVkxAP4zherW+y1OmCAYxxVtzFUg6gWrG9UR/32hU2/yoFb4+O3sLWg1nIhExI5FRX03RAsyZUBB6aoUj2s9TdJHKPdsfMc8pDhJEYiZCwppu7bgdkCgaUU4sCkSH46qNIvtQEKIRuAblw3pCJAKGMThZVzb21/DVQDU1zg4fceImQOU8XzL3K3CNmhkSpEb1UNR1E2+8aWnHv9jf+2XVNLOXWbuu6/6WlkboF8db8Oj+IqGYZ2f4W5102M9EkEQ2UcNEPNzc3muLRanlycnR6vBqGoeuHm5vrB689vHv/wccff3pxcXHn3v3l0K+342KxiHFab8ZpitfbeT3GMarR1YePu3tnx12YgWy5POo7y3aCa+AUchIBKrrsQzazRKiKKZmPBjEVJgJ0Ly9wbl1BzN72q+iRSJ48whg4BGJwDcItBM/acH0SEA1NTUAlSYopmtkkoDgsj8+uRvn+u3/3tz949/Hzi251BBGzpUqoqnNMN+vNRx8/+vI7L0Ts+Ph4nuI8Ra+kRQy2l9ucmU8qc3qwSS1GRNWdu6oytANWVm9lZsVefrWdf3CHA4yqv1bhdfvY10lqwhMFzmVgSWwmWa2WX3j7y9/6tV9/+XIzTikJjDGZoiQz4hrX9ffKVgSxGbb5n07mbh/WdqOtQti67Sr3zi5dd6A3v/qSW5i3yq41QUUoPuyu66TU1jL75LOdS5GISvdON4H709PTeYzuGi6n5aYA3nU8hOAKTF0DNqZRtY64sULrK/sRmvZIVtquqOpqtfKHmvWqK0TcbDbb7fb6+pKITk9PT06Pxs12vV6nabYytxARqcu9MDh0yLRa5gGDdJhte6iRVkBBw5Hq+Za9SFzNeCtZ5RWRsEmagP3sxGqQeHiwRUhfA5eMa9h1EvKGgqlB+Bx+9EKtFh8K7K06K51v56nvGZ
FqawP/c6cM0L7v0oedVIOwrqqgFoJX2CKSUjJjNbEwRdiOshy4J/YVhUCSvBTCMzty0YWKAiARkxfIeMJXUgEV3dkLDWHmJlseWW1xCffHOvgzILuKyCuyK04CgAc26hXe+sGKDVwDVBWHw+bq5WKxQLI6Nw8R5mmaNBUplX0bzBxCF7oZcT1NExOe3jtS1WmaQtDVMoTgmZW5anae53Ecu7sP5ji+ePFCNAKyqsRYQk5EHLDlcQCmCGJ5AImL50x1uSI4HxUXFcHMSuIFlrbVeHx8HPqu73sjFJHokCVUSVWCazE+D8h7j2UXvK9WQb624WiVluqvcEsnqJ/9xa0UPXdABD5hSZIpAQAEMA4YksTj1fG82T754Pzu2Z2zo+OXj2/++yd/+3cffxrh7Jd/Hb7+2hcef/T4/tnrR6tedP7mL67ff++jr/zC3Ufnnzy+etqt7iB08+WLn3z45IMnTx/cP3r7rbunR8xhiGQahVQDhX61QuvibGQYuJPSUVa8xYJpYOK+H7egSQRy453MRhHUlScgRKSu9uCivmMR8MIt75SLkFQ0EDJxx9RxkxZoAEwAZtngJPflzBIDgZmKWBITQy8EmsbNcLI8XizjfL2+TC+fXT14836aNwvuU1qLCJoSExkwhszUVFqaQ0RAVFP1Vkq0a/NdOXvhCJrzfRCYvRUoVfVLS9TUvyqCB9wt1/f90Wpxc3OzXq9FoohMKepGdKFD1xMYEQIFoFlV1Z35mjqEWQVVFn0/jGOapOvYTAwUjNryP9yfebj7F/bUhT1ek1uP1ApYQERvSZtEzCAE6kKo7avTjm9W0w4RQU33bdGM4xX/HUp6K1J3iyj2IoSNQFJstJx6zsGfDbUemu7ts2xfOd6nUwTYzY6LMXqWekxpvr7aTOP19XqxWPR9WC6XIQRVbyQye2ixH8I0TdfX13fu3HntjTcIYH2zAbS+77fb7WK16rru6OhojiMixhiPVkfD0M3z3C+GlOY333jj2bMnpGpxNFMjQOAUx9B1tOgFUCE9X09PX7x88uJyE015kRTW4yRIFDgYiUZgcv0p6miEUcWlmvPhCpDKebQY/1ZyESvyOPW1hSIFUK4GMdOu2VK7ZS2OVYnrVFz3sVXgVNUzdV0aibCIukuVS+cwLF6nEDJN7bQcysEfBTUFMTOkpN7DDIV1yaEbFipJJEoyZMqVvaAGatmrAQaUnbAIedhN8aoSQ0DSkr/uyk0VzxWjKnhbTPtM6tvveARFMar1F8MwmJn7j1qK/qwbvvIRe6KHS6/h/DMBqKcVrLfTdjut19vtOKWk9+93p6enaRzv3rk/LI+ePHu+Oj4hIgUMoVfA7ThfXl6P0zzPSYxSVLuenr+8Pjo5no0CyOlySZDmebtYeCcGMFBVQlRGJApE0IWFiFBKs8youcTDd5kga5tVO2RmEkMGzK04kJk9X0lEevJe0QpqEpOLVODgwS9Hc1VJKknFkAVhVri5vHn30eXffP/dD568wLDkWVLSZBYCIyGAJrMxppunzz59/GSc4/2HryXTqBJVKiZA6Z5SsbTqkQe0AI1qgfsdRz5rW+uXt2/1OWiAjd/tc7GlLsMJ3NM8fCgJ5AxlxH5YfOFLX/zFb/7yJx+9N86iCvOUFBDdHYnqMRZ073BO6cHAfbKdzl0jJN7Arz6+phemlHwwfat31XIha1T2yrQPJEW9qpY8qGqdSVb1NLrVxL+wr8xh3CA0s2k7j+O43W7dkqmbVRUbgDwfLz8uV0GX1nGUaz/aKNCe+sd7K6FSiT0MnScxOtw2m8319c04juO4OT0+Pj5Z3blzZ83h5OREYzKzvu9FTDE33ifmEHpmPjo5LYZr6fJQeMsBUhWU9AYfDpDaKd3MlIi7jt1jbpbzkgBfwfCt5OlUaFc9yvXD1iL19eh+olwlJcvz9zxz1Us/Ms74/dr1w/64KX9eRY+mEywS5c4udRSKNY2gHHk8m9T2o2oiBpAA3dAyswBkANB3tB31Zj2t+rDoyNSo8LHW6QAl+H9Ala78qypgNryrCe0Lw1AVoR0V4H4pTbub7k044APuXVEfv9RY6fkpAoY7z45fFd5648FisSBy1470fU8MKSWvoun6vusGMxMxjxS9+bCf5/nq6qbruvv375vZer3mXB2bBz56O6Ptdnt+fv6zJ3h0tLq+vooxVuXNGSgRqe6azuVNZVQ1NTXZSw9jtx0LvGBf9Bph6WTJBAgAq+OjvB9gXiE0SzKzRb9rf9weu5j+vq5TeUcFXP2ysrZ6iAiE3N4GELyeyrFAvOSGK97nfhzsrZ9RwURB1MDQAqARyaid0HE43U7Xj957TAAovO4uw9GDx5fnf/HdP717fDek4etf+aW/++nPGfWrX/3a3/zNX50+6P7xP/uNP/tPP92uU6AjGVbR5s04X3369OnFswd3V2+9fv/N1+5qmgiNqQMMAJgRjyJRdsMCmFmor9n1bBCIwXTPYdn3wT0xAEbknUHd0yOAQpz75OZGGgQcqGfuAjOzW0AOqy50iEbMGLgLBIiqwAiALjlExBAphDALoKTN+ho7O1vdmeDq8YdP3/ji692qW/RH0/TYkgAQKjIghRwdDhbE1ABUPGOH3HbLey3gCcyVVruuq75Aa/VpKH109wX2NE0+Kb4yGsfGxdAfHR11Hbvjf5qmGGdPbiHAjr2TZODA5LGOJIR58scwDH3fr6ctYK4KVNxz/Xp7WDg40Knj1U5iarrdZmTOzAJ9TFMfwmIYQgiM6HGcnLK1ZzkQEJgZNvc5kBPYOIPtloJS2Zlz6oOriMidgtoUqrUX3qbf1k6u4r+u4RY71n3G6nkUAADjOHq7tqFfOiZc3dxcr9eMeHp6ulotmFk1xTTNKQLhwwd3zWzczs+fPweA4+OT5XJ5cnISQri8esnMhtAvBtGIiGZCBPfu3bu6uloulz47/s6de5cXL5CHedqaWc/BYNH1vQ7DFOPzi/Wz5+cXL68FA3RDFBtTMuJFN1AXRESnPNBZRLbjWnFW1X4YMjK7FGGu7bBMS2zv1l5gCY7VX9svbx/W2OcHqZJ+qCoC19vX3eGcABxCiC13FREzrGlLdQddRiyXuaN68UlzsmQaBUDMVE00j0JVhjBQTyF0vZiKKSNZ1lGSiJh4phyZp1cXGiKP8BflL2CY62TlRoMx23mmrVFYzYxuKfG30b794MkXNTHBu9u7MtpifkVganS7g71oPzT7m41/QgTEwAzAgVhUwUjAppjS5dU8J0AK3aJbLO4+eDBO83vvf9CFPvQDc5eSvry4ury8vry6FkEFQu5NNSm+WG/O1uNqOSxCiKJEisUX4O5WMWM0CIGZ+9Bx6BBzXo/jTEeMiN4Pw9835PwoT2/LTS886sjIREwGMSZFLxGEOvvUSoFGMjUTQANQBUDmJBCG1frl5gc/e/SjD54+ezljd8zD8The+J5K7gaHoKQCKvD06fnLl1d37tyrUqCqaFX8VfxsIxUVn7FxaR0wsdsEWOnoAGFafQMOqRUOmKc1SvlnYWDh29n94ffKGipj13Vdx13XO
e1cXFxKMqCgCobGTEwotUh4jjKIGqIBMmJgS7lZhZtknmrr291aZXWpoQt171rQWaPU1n9bcWz7qqDf35o5fjnugZnboSkCiKlX4Et2OO56QLiuvVwufUN9bIYrtLLfJbU0GjQASHPypEpuChcrPrRiMe+4i0z0M91zGsr+Zkctc8l3JaqZ6t6eoO/7s7OzrhsQMcYYVRAxj4AYFn3fD/2iDdlBca8c4FVdW20z1iJqVvm8G3wZ21jIcM/N6pAX2LWYrtuh6m1rd9Et2NcKqnONm3pRzvabQwYAsmLmvkW/0EARag/LrG4XPaDpT7Nrc5iNyZpNCsWIrZ+rNRia4WcxRoIec2tDMEIw7YxVIQlsttKTbZbxeMnotWImiH31GbWv3NJpy6v7rm/ho14dbei1Y8zBkcH2sxIODjPzlNSDg8rsd7sVKyYMje4EdZ3hX/zTP6AaEbLkF/mfPn/MDKcxTnEGcDfeyw5hdbwEgHHeuhHY972YupobZ7lZr50XnJ+frzcLtUQMbNh1jMBuWK7Xa19HpX9fN3S1pY+1iovH1hERms6iHvRzu9yn27W4Nc9zVHHOV1HfU1ZU2tElYAbuNK2yPzN6JMxNY3edjokYAEPIfTpahQB2ZeWHGyZ5zl4Bde3bYcF5hGctoQEiGAhT6PtF3EYBDdRfX2ye8vm9k7tnv/zOev08xZs/++s/u7s6+5d/9K8uN5ff+9GP/6vf/q3QDV/88tufPv3k3msn3/7Nb/zlX/x8vb7ckjAGCrAd0/ZqHuN2ji8vLqavf/WLHegYU0pxCNwPbKYpzQ4lLE6LujXO8hw4udO6EREtKHhaiIiYJhVDD2S5cRcCM6qJJ3T0fU8GzMSMTDkJEXI3WesCAXWIaGhiAqgc0Ewg96eNMSUxc5SJ09oE75yd9oQXz66vnq/fuvsmXL5UVdwZ7c5nS8035GahpmL90DETUCWzahD6PqY0+0jGWuTtcUvvNpNS0sIloQr44jyr+KCq87QNIXDokRN3YbFapjlO04hmEtM8J+/lwERIwYBEXLQyojHRMCzDdo4pd3L7rAMbxTGrtxUND2zI7OuqzLRca4hAHNC9uVRKRtOuGh4xJ3DvlKH2c/5mvxj67z3aa9sLDwRMPV7JWBGx3omaJCK7FZBpALWDUbvUfhgAIImZJROtlowCbsZxO02M5jmMwyInY9y5c2daTs/PL548eXJ1de0GITMP/YIYPTtUZCC0Yej6vr9z9zTJTIQhLMdxvHN6tr6+cbbPIfRDpykK2kWMNzebn3/0KIkZL41omtP1dha1MCy6rjPCqvrMKjHGaYyCucYJaxV0yfE+2JFWzBQg5F7Q7hDB0v6xhTYy7YM9/1T3xfYNkjbfXhIoqlpqTkbmrvYyVdWcvuy+aHBKTHWd7kb0dgvu1wdib7aLwObdq5DAbI7SLZi7niV631cwUDCwWPAJwWOEQAa5xBHd7ED2oEdU0SITrHH62r5u2hqEXJpeHCD57c+FR+3lp9Ru75UPH5DA55NVi/B+qCTX+Y2oagaQA+NIaIY0J9H1hs4v5mQPzpYPX3trPU4//+DDruseLJYp6vMXL589e77ebLfbSYyZAwIDdob4/OI6hCdHy+54cboZZ+jsZOiqHAGgnMnExBgAyDsVQ56PuMup60NJbSKs0oeIJCoioaKqms+4UUtEZmgCgqXnrWduICUQK6a/QW6dBsQxRTa8uF6/99Gnj55dz90d6o820bjMOgKAmMTMiBEoDMvV02fPHz959vrrDwHZm9cQUSyoW1Uraxr5tJSV4V/iigf2ob1KT3jl959zSUuAn3+T3SUGkJsWOdWrJ+cAopdp9n0fAgvY9ebmvQ8+WN9ssZjBYAgMZsaABKopbfTGIzld1lx3FFEttGma6kSEGqOoJNNaia3DpRbj1ZNbwFY3X33fOhjdzNzf5DCnphmsO0mxiUcB0gF4iXOB9GKxmKY8th4AiMCVECguLb+KOqoGYaVQatrMHGCF5WwUcEaHmDfC9Vt/BS+yVk1EtFwNiLbZ3KSUNAkiHh2dDMMgItQF9pZCPhVw6LpuV8tXd+02529lLt8a11RlaDvbvVhWVGJ3u5tUQqgZHFU/hzLPtt2sqmNbsQZr/TkAFGXN10ml5giwNKqAEm7ZR3hxZc/MW0C23jqrWus4jrvckFzjggWnsH6DSGag6tcaEPgYBiIkCoaMxKakmKZoMZqIdczm89VBKpJX2dryioqBWPxce+jRoDfs8/9MOAIAQG2KihoABO53++vQQkRC7HL9HZQ2e5nueFe4W4EPAEGmTSwqvviYQq+1S6nrOsKwnaftdoqzeHMqs+12u00p9d3g1ZkiMs83/shhWMYYLy4u/M/1ek3DwpsrugnObEPXnZ6ebjYbJ4zGIDQAyONOERHRyFUUQ6RaheK5euTRNcDFahHKlMyAu2iA9y2YJflvRkiAROTpUlKafdUROrWwssLIDyN0TgiEvpOeZOHWUZK0W5i7zWRnqXvqXr1/3Q9XuTIxGBKgsRmCGSgCmqphiuNqebLZmGynbhFklpvn649+9vHl6fLoaBg6Oz7D9fbq2csnX//y/W/+6te++NUvnj979Fu/81998ujjjz/+1GTxS7/81ofvP9s8HTUmAu6578ICQv/yGs+fviA4euP+6esPzrpOQLfJRibrBuO4qIiCpVBVRMzAx0ZXlg0+W08UQAHUW8KEUFKzTBEpBAodm6GZhY76viODXFTiU8XQNb/8JzIDQNTkqZxECOCtLNkH3RMZm6EJAaskNl6E5Wa8efn86p1vvCOzgbex4gFEJXkyZxU8qGVIPSMFIiTyOYqIeSaglc5gMU41tbroxCXlzNRAAHxkqqc0WEqJPWbmkkdNS688RyZTJMa+73UY+r5HhGm7ncdJk6AJAAiYIpgFBGTqwcQMh2G5XKb55gYQ4HYwEMyL9goWe869U0jRO20vpOAJvtlKzLm8CABJBAkpdNz1yMEAxAsGM96Xrokum508a3ywVUH2Q+sHJXzt+VUzqEdeYfa85Ahh1TCgyJIsiZvSrco8W4WjvWfF51bJKK8O0KhWIQSRnM7tXzN6IweNMaUUAbXvQ0csGs3MNN69e/fo6IgeOnXYZrP56KOPvvSlL/V9T4wh0GLRh0BJZjNbLRZ933vXkI45zXp07+T+vYcSp+XyKATCwHOcrq6uXl7eXF5ebiz0ix4Ut+N2TMqhA7A5RmJW1VnUUvT4uy+eQkgp1Uo8CgwAClZVqMri/H2rqsrMiNWza2UXdipXZgvNRKO6rdak2VQLp0WMVktAzM1pneWW1LucmwRALUetS/IUNTcOmSnG6KODVNUM1FDAQCCBUbJIiqaSkBA4dIRBii3EaOrT+LzKtiCAF72FQLnIR0vKU0GlimxmOxGL+40fDxSv20c9oX7w2iQqPZC9/56nRN5W4/7eO1eVrpIbAoMaABKSdwRARAVCMBERUM/DNMTL683VzbbnN4bl0cuXL0+Oz4jo5eX1ZjNeXl5eXV4nr3k3UAFkRCMz2IicX169vNk+vHNMYL1aVE3z
XFAia6sIGKN4+76iiWIuKwBDNA5VIQOAXcCfoLQQVzUBBjACM+s4EBEYqqglk5KEAh0CIYAJmIhArnqi1dHx05frTx89eX55NQkKY1QSYChN8FFd3Ggox3Y7bbdbr6In5o56Zh63JVIHOdldb/mbWvj7YAdDMCivh4BE2jQZas8n2yOcek6l30pQ+fxbzWwPLr91mOcPm0/mMEMUA+z7HsBtchGglNJms/nwww87xb7v2XKZpSQUrH0cVBXGcQQgWqyg8SP70x2xpczqzApVIR+zPdOi/mp5EdKuuMXzlpPXL1s/IJU2J6rKwA189qrZzcpA8CaTRUTRpyUDdH3fD0MprZxxnn2iNZdBc2Y2LJZZWS1Pqa/TrrDdzfKKqVl/Ni85kIiM0xzTnO8fOnCX3zwyhr7vA+aWNiEEps6bk0NpNq4F5YipGj0K5l8CIVAO+XiiRQ1BEHKFpDNnCgyEIuLdC5AJEA/wNjtuMNT9qhKkGN1l1mWRO+1uUjOtoAFUZhJQ2irDrnYUAHYslIjqaDevkm5fwVlyfZwrflhyWbH0rXQ88YZeVqw4K/HDGCMyMfvwCRIyVYhiHUPgYKpx1jmpF0sJmBQ8V91r8QK33Bm+1Go01m8qYK0Jd+8M8s/wlbfyvYIaEU2lSvOqDmHOD7cikbOLEBHDi+fnADDP83a7VrDFYsFdUIWrm5t+sVDVy+v1OM5MHefgPl1eXoYQTk87Azw9PUa0Fy+eznMUkdWKVPXlyzFG6bpumhDZiyY9LK8i2ePrVJNhZJQ9lohulO4oB/PYiJ2XiKjnEEIgDEQ0rBZcNmBK0ZJUcAjsgsLVZVWJs+4T6s7DVxkTFhMfaLc9frSaaz0qU9BSxIyIxLsZd/WeO1Hh9xfVYJ67ZGQAKiaM5P1re+6gXxqIgco8Pfno/NNF981ffgdX4eju3c12/Tc//M6bb33hV/7BNz796CNGfHDv9eXq5P69N77/vR/GL53J/PInFzaLDiEgD/NWrtZ6MiyH/v57751v1xOaPbjfdyElnTqMXaAFHvtbeGooMCmSUsn5NiAkCp3n+TAzzLFj6kOo3AGK/h1C6Puu6xix98bEIeRCUFUFsEBI1BW+acDO0QzNXUP5ayxupBBC6MSIWRTR+n5ANZsVhZ89fj5tZxASkS7POoNUYhFEhJqHJNR9hEa6EBExFNN3LyFeGxd+xRBmNsAWtUQk2Y6k6yXed7RUI9AMMyJSYAJ0f4omkZhAFIBUNXCnYMidwByTdMNidWQv11sD8KkEhpWqAYBgT/MubrZWI/f4VuU4jn22M6IgV1AJUWDuiIJZHtpTDQc0j33vCMdJo9JpZTdouy+1TCUK+1OJmwVD5b8taQDkfrMt+VhxG1sxEQ8caVis0IO7tdvdKgTQxAnrCd7Z2E0UMzNFL7CwXEJgLgFnSWmaN+M4jeubm5sHDx6cntw5PT0V0XEcL64uj1+8OD4+RoKjo6WZrVarzVYBIMZ4fX25WCziNLsckhhfe/DgxcuXS6IoaYrjZoznl5fPXlzEGPujs5TSZtpsthNRCF2HIiJbT2eS3AwjZ6R0XSeNo7pibOb4CNqUbTg0PB8JAIjAZ3+20qt2l9nhz6uCEpWImu0jaAx421ngudmIVwDkljxlu3xjcU+92xn/lTxV1WxTHqRqoIoKaO4kJkwgGGAK2lN+k1QGFJlJLoQlJiAkRmQgIwJPaEfMrg5HPbUd9z7ArtvoVFXhA+C0qNiSDBTdx2/o3tVpmupPB7f6nKPqBwffM5J7PRHRZxuAERJ6sw1VUAUiBsIkEuPMoePQffLpo+PTMxF5+tFHL168iHNK7htiIjVABiNAErC+WySVZ88v7q66t+6uVHWaYmjqxMBRFHMmMNEO93xVHbGHcCsc/Cr/TNQRse+GNb5atALGQt0iYKbMhAH9hApD0Sg0ffr40Qcff3KzTkarBJwUsVvYVH0fSKUzNpimKF0fjo5Oqj7Q8eApVHUrzQ6poyWQihIHor8lzPbPfDl9XvzwAIsqLh1887nX5l+5CbI4zJlZJEKeKSqqenNzc7Y4yk9UVULVWVUBSTWFEFzixDh3XQ8ACHnSqUfS3CB097fnRddUxprN2FJEPeoJFaQFSgdZHjtGV/Wu9qVEBGSPJKu3KxufugvgtFRZN6WmTZYehX2bPwkAPe5067rjLcnXX7M6IbGcTM2jjQjdbPawJACsjpYi4g21HHQhK0tEBkaIycysGIREBMysRm3MrVVWqTTXyeLNDUJI7bIPxGjVunfMqgmGH0hVPeyfn+HcAsQhp43TsG495npUc9Mupy/liI0deBAAlTAws9Z8D9zBP9vGpcFMPVJUA/Xif6/WrkWDLWPXJqQWo7J37PJUTlXPIxAOxiCC23EeR1p2rjDkpNZ6twr8Fl3rv7afXNDS74FxUXHSbA9jW+yqaGmNPyXJ3liO8qA9e7VsliFiSPMYQjCJPpWrO+n6vt9O42azEZE5pZfPX26mebFY+AS266t5HMeTkxPC6eLFzcVyQ0SPHz9ZLBZXV1fMjEgvX740M2+/ni6vfX2LxdD3/TjO2+324uJihw2KsBtBj4p7hVvQWPxVNey6buiXTqiTzGaeS6IxRisBH39bd7ua2SzpAAsz63dSSeCKUYWpllmIhD79KNekVnZgJd5d3Vp7bN35he1KDqpLqf5aeR8SkIiRGKiP5EyahmERYzTRPnRzUgRcLlZm8uLZxbs/o2996ys319OwOJrS9D/+hz/+1jd+FSR8+a13nj27+M7f/O1v/Mavf/Htt549++jtLx6P29V7P//o+mrDq8VycRwSx2jbzfZk0X/4wZOL80dfe+fBV750enRsZmmO8c7izR0mys4fBgDzPHuntZrXxMxBd+EUKG4YETk7O0P0mTOBg0NGEdGng5gZFh9MGTWR0/3RO1UienB4mmaRHbSZQYyYMcW0PDtlojGpCTx7cr652YbQey2flfyjCm1NkTlURCqkpvVFACBZqhkOUvrHeturqnMj7xkhjhUtGR/ggIh4HUIIQcCmMTFi13XTXNr3DxSnOU2zAagCL3oQQTYzS0n7VVgsVkQkBmBouzhh7uhSJjhbCROamZjHVf0EAK+Ry0y87T2aKyENEIg7YubQA7KopDpobsfXdgp9rUlouRIAiBk3g+kdxjVpED9XU4Gy+vK4vVBe/VybE7QnIyLznrcFGibY3gSKqGusl9qYx4oMa/3xAAClckIBKpzz06dpe319PY7j3TvbzWbDHFR1tTr+9NNP7927B2gid0IgnxSPiHGap2l64403CHiapjsnd2KMDx+8fnm9psDbGK/Wm5dX188vr64222EYxknW6804jqqqOqabG0+5d0LzcT5VHhDRPCdE6vsFFMWLiLoysNgNANcq6q7dhhvktumv6HlITUTC9vWnVp7Vw7uf+0qqEAIAbdyfdV+qAkGls3yRWFZ5bHWdeiP7oyM279RnYARgpKoImhLECEhqkFJKUZKXyKY0q5JqbmTJITB3QJbSDLVSS01EJakKKEj1c2kTGKywskbdtGLUtvA
8OHBfWUQ1MkjTLHNkQFX1iQuUk5p2UbO8L6/uHfyZ1mOS7H71FrB+Q4cldR0jxjhNMXbQEYUw4GKxWC6XH3/88Scff2re3kZsu91y12eiUDRUQgJvD4pqSR8/fXLS68PTL1mglNLZ2Vnfh3meYxQVUSAs7RydTqswdeHS932UXWpZPfzFCkIagbYKLgCUAMIOf1TNxxgiIjKB5AKNTz78+KMPP3l2Ps4JuqNBuR8TSEwnfd+qU4iYG2MirI7unp6eutnsvShijMyrA34CDRtsX8E/dP2u9eUr9YSDC/GWLdFe217Y+l9sH0k+5/AzD6xBAJimqe+D82rRrClWtbLe3wmZOZQ2+pyizvPcdXNM6ulTZlbSLMk/uzSpWnIb9xCVVh1v9eAWRBVKrTjwD5VX++J5v4FKPdxHDAVziHaZCNa4z/p+kVsaFs2bSv52B9z3/UHjRyuD3Rs7xCONe7OX6gtGqc2H3ELjqkPO8zhN02YzTtN0cnJyfHycUjJZezWfG4TOw0E04CK3vfGpksx9H0LfSWotIsU9S2y3zsYg3NFai73ahE+qGBWRsI+N+TWNvGl2BVqJoOzN4ahPaZmVqnocsCbJA+xMxLqV1UKrmOBP8TFvxUCCugARn0AJIrmjdYoaY7RS7uicp2bqVe9qrSr0BTuUmAIG9GAIIoNBSkkDx5TGMY1blOWCu7ySir3VElbVUCZzYGPOHJgkLe2XaVi75FJfVRkARtV+qWCsYPc7aPERt/pY3jjbgVRz1Une0DCFNzYKysonDzFwHE6u5unZ82dPXnSGqgpTOhpHjtdR9VLNUlIierK9Vr1UVdXHZY8vG4aFAIiqgD0lMwPjXs02syAH5jBK5JCXrrgX6pVZfAuw1Np56pqq9n3vY7KrcZUsJVFLmSYhsECOPgEjAAuCxN0goJb173mhEARMTEWd32WnCxiBmpqYKBEFYi6xyjTHjCpYwydmYBYVA6EHQ/2lsg6JTLkqL5mC5WYP2AkAGCJZP4D37xEREZxDUF54FrhgwnmrRPxL48Nn3336XLsv/eYXrmmbAmzC/OFf/ck/+c3fvXvv+D/86b+zy6vXF93Ja3devHn/0fn8+79+PMSP/u5n0PcvpnRtR3fnCbHrNsmgWz7fjC/evXiytq9+8fU3Hr6xWoSNzia67KkTE5lOjo6Y+fn5y2F1ZMSA5g1Q0CJAJItDWGGXa+1SShg6Xi2oJNljrg/pvYPWOI7cBwBiYGYmBvCULNO+7xQMEBShAxLTGOMsCZbHMM4M22MY79MY0K4V4xypX80Kahxx6MLJdPXi5v0NGy+WSCTr8fp4OFktj+YxpqhA1vedaIxxVE1gaJEU1AL2x8M4jjGmYRj6bmlKIoIQutD5whCCqc0TxNnDkkREAQdFjbMBIEInIqqmhCHkWTee97VYDNs0IyMRK6Km2cwwBEQ8OjnWJOI98AhgCBSCdnwFIzCsdT3algdmnnHernqZxgRIqhjNy1wYcghrguZAyCFDK+2MbV8HdUdaYeguBVFEiSF0TKxJRjMDBCQTSczefA/cFVcKe3M0w8oIZm8O1nMQEywzDQnZJ4ggUdeFahW0AjjNiRpfJu/quXO/tb6vxWzGHFKKLX+sKpRqrGyhnlDdN/595ZitDV9Zf5WX1thXhiYic5TM0BEAaI4GQEgLUZujMPfTqE+fPn/27AVzHpQ3DL1ePEeyF5cv7l/djSpnZ2dJZI54dnYmGs7uPXz+/Plkto3x/PplWPTb7fb6+vrl85fPnz+fb6YQKU7TZlqLiGr2Z/kyttutEWLgLrCIiLcWYFKE4AX0KY+d7jggojfvtQLhCnxmjkm7rnd3WN0aM+HQFwgDAhKDqSZJUMJ0tbt6lWqtltxqIZkzlAK5LAgVAIiLnz6J16mGrtvds97KLI+m8A314cjjGOf5muSs80l6IlFnMzHTCArEprxOOnRLm3C1PAK1pDzjMRAh7zzlOfCLpAJpFk/NyIsnZOzMLO3mEuWhnuVfJ6vcSsb1HNg/Wunj37TYy6UhoapeX19noihuqbpT0GjAVd4fIC3sx4vK/Qd0JFfB0iAa0QgUKQIAByMDwCxJF6vlZty+vLr8+MkTh/Z2Mw3LYxFJgACGwaWlGgCBbaFDWkxI33lqT9LFN7/01pcfPozrzWq9ubPEI5QpzgCivIq80BA63ey0JSZEE0vbyWeCORjVzJJq1w3c9RpmA29ckaKqJey6rgsDoKU0mykF7oeQZE5znCGx3JGtLjh1KL03H4U+8erjl9ff+0TO8RjuHK9FQacO1yw6KQ99WCwWDBZjlHk01R40zjHAnQCWZpm2M2MvCSUGDOZtmWC/lizrWyULDku1hW4VfXRBYOKsn4gkKt16CbwrAYCoqULYxU/qVlZ3ZP0GavuZImUrl8t68X7Rx05Td9TFna7pPxXGKMtlTwmGsLh7eo8hQOjGJAASQuio68PSi3ECr7ab666DxWIxTfHq6rLvuwcPHsxzcgbowf/T01NEfHn5ggOenL45xxEAuq6LUbrAMU6kEHwZiNSHrUSTyNgReDd4NjP0DCpkFUXaqwugMhWwAqR+RsTlcmk4IyKAudxnAvJwIioScec1EOLgYSKD0SCPVlITA/Oc5X7AlMDM+p7NKOWx1Ym7PISDgAIFb2asatZulqpKbqesSN4ZS3cTXzGlNM8xJb24uIwx3rlzp+/7eZ4XiwVql40fYqqV4ZhMlQIHymFMf+u0jf2RtxMzzwhREVHvuqHEbJCIGVFFU89skLx3Y2ZQZTADETESqCFhcOQ0cCY59KG+l7t1XFkmBkgKqO4nSvPsW9NzX3yIaF7praaSsdDMBITImBFJRecuoEh2RocQEEg0eryBchW0D2cHtYikJaFwZ/AQidquJtMMiXAcp3Faq8XgLXySjJutv+kwDBgADRjJkNwp4ZSlasiIBMZgBjkdFBfUB0Rcb9fQY0B7frlZrvoHR6fztF5iZCNGFDAwRSRi7vt+jlNtr5rSrLlnDCuQv1p5Cy2O0VAj6lliGiDmMRIAeyFEVR2GoQYeagGtqoYh242SlHYKjyFiF9xTj6kocogYfDjE1fX1xcXFy+sbQFSk7TiO42QIUpJ3K34rEuYE36whOv6J3TJ2waF6KKWqPKuyzc/n0noVSmVtS/Duq6vRBte5VRWausHWt4SNd+HgQa3CV//UUkjTnlzzqaypcj5wnByw6eqeqd/gvgPJmiLmVjm2JmrRagytpFfVy8vLsOgef/IYTuVL3357DtPN5cX9o9OXF1cA9JWvffX1u/eR6Qc/+vHJyck3fukX/+O//8tv/cavJfvRkyfXpLhdX6eZAw0msBoGCrCdrz/89NE4Xc/yha98+e2zfqDeQJUDL7o+pjhN03I1KCgDavAhe0gQCIwYKY89t50jrWRZwL4Sg8WpE5CMDclATcF9MATe2wHcuM4b1BEnhI5w6ELf933XDVFmoz7iJIKlksG52OXl5XI5AOGU4nE/mFlMs+eAZSzNjYKCoYLDE8QEKyZgkxvcbtwB3laChEYn8AmNfn4ocxoBoK
bKEJEhpxRFJk1pmqZAuZGXaO5bQ2UabwhhsVhgSfTv+36eXCfdIXPRUF/tEj5QH+vReg33FcfDkccHT2kf196kpSzYNz7rHVxytKpJfVarzkLjAmxh2N5fytiY9j6I+BmvCz4VDYuDsyW3+vr1JgfPahWyzwJOPk1JM97mhI0Y53mekUxVp2mKUTx39N7ZAw9BMPP19bV3Y/YpFjc3N8+fP7+4uFiv197MQFXHcbLG6CrqH6VmLEddhqrWYsn2jSqraV/Qr/XannY7CpyzLa1l6Ihf7t5WKIh9cPJtyNQCAahJXE2KRF1VJcDqxWyxqO7awcHM2+02WZ5XkdSdBYjkzesB1YREBFMSNFPxxDdqX7M+rnRL3oHo9qZ/DsV9/nH7dfyz5g4A4MXqkAEFsemzmrcye3p2lHJASvVoqaOl8R39FnHZEoIfIoLIiuTFjZUlMrMWz703Kq5b4HQdQW9ubh49fXbM6ej+SgWjIIKF0GHXRTOJCcPOU5BJm/J9iqnfMTOikEhg7IgNgteuo1GcZg+MI/Bi6Pq+N1NPiSeivl+gxjkqgyhCMlFJgBTNLrc3T8+f32w3UYJxcqc2eAPG0JNHHiXFGFV84JUi4vHx8Wq1evz0fBxHEUEfLXALM1uww74KhIgu2tyvlAwBvMkvmB16T9pQ8G3u3XKndt9l3792gHK3ZQSXgeN1hZUxtk60oRz1/LprrhRtN1Oeu9MvuHSI8JGDfkkrN60pH6gLc02tlpDVp9Q1VyTx4iBEVlWj3bJpP3gCjTjGoqol3d3K02VL4Ei8bqKSgGo0w9p4vPKodkOpiUZmgOqh16btuwZVmy8JtITetifU980a+NVVlQs+sAoRa1+MSmu+ALe3c+CgcQkd6CotFrkW3QI2v9d+tmR9UA0ftSwI971+9VZE5AgAxeEIOW2HN+PcflO3/mB59c+UdnN9/IaAiiVYXbZ1p7DFzWRFOefS2bVFJBFt3dAN7ml904qZ0Mim+o2IpEytaoETJjLolwwGySeiRYhRUhQEDoEAQCVHOkTEe4NUPunYUYHpttkB8puZT0Q8QC0zW/RDC7pKxa3VU1EOmkZNPoCQSs5tDYTW3fdrw1//1V9sN2NSiZK2UzQg6oIaUmAVUwQiHxBbDCfbUWyrSWBztBuMGZRW4rkOjz31WkuvvLoZdZNCCKvVarFYeMqvlXyDVr85wH7b1/DaFbYwaozyXXSiAhSKVVZxqIVafc2K2fXmTva4bxAerPD2Ufny7Utadu9/Lfvl+c354w+evv6lN5b3FnMaOIWPP/j4xw9+8s2vvMNiF8/PHz58+Oz5MxX6yq984/vf/cGv/tav69/8+KP3zxeL0+txWi1W42ZCJVEFpIj0+PnN9fh3n754+Y9/5ev3z06Jk5r0i27ebuZ5Xi2PxnEOgRCQiBiQkDr2uQ2enQtdSV3rOh6GOg0TzIy9EQ9B3zEzGgH6HDHwQgLqui4HbUBQMZoSOOZR8OmRgVd9t+i7KeGEtIgYE1ruoSfTNHLg8xcv3vrCG8vl8PLFmpboSDQshjwno5CBaa6LEjAS0Vlbm6RGkG7js6Nc7WZWVcmWjF1Lrkxnnmd3lOSU1NKmDAQiyGLovFckGSsYAiGQqOZkDmZvV+CD7Dbr0YmJEMt02F0PzNvH5yBbK64qvgXu3WldXwr2ibp+T03+douuWTzgLoO6BV1rCrb4zLSTFtAYBpVhHSy4noBNy5n2vawmohQtpObkOOFX/1l9I7wVcqzLPuByLbXmPwiBUMFMzExAJAlxSKoa5skl2fXNzeXVlcv4nj6g0kw8xjgMwzRNrnhtt9v1eu3aJxS2noxuw01xb39bdtHuRcXDCpkDrG45WwvnFuAZzo3Eqgpc/dxCsq4qQ1jTweMynpSULdtX/lT3J2R+BiZjqavRpDUhTfPc2oBGmlTQ0FSVRCxFRQAXHcQ7eVw8/Q63PVDX9z0gis8/PoseYV+KHwgUaDxN7U7dvvb2T7cX0N4W962LV37eESDiNEVEDCHM88zUYTEAmLtsCJYoaMYZUUJMKgKwGedPPn3Uy3hn+EK3pGRAwITZ8QYACKU3Y0089v9wh3vMXGqYfbOQmQkQjUxUY06f0Y67rvM2hknFT0M0ieIqR1JJIqHvxlE+eXrx6OmL9ZQ0DJ4F5DyfwZDIJ5FonFUVsl/bEPHevXvHx8ef/u13N5uNqoYud9WqQDvA6paIdq/T5di7gFjyb/eKgnY5F06z+zdv9/E2jiGi3Ioc1gtbqjzgBpWT7+5TyrNTSt4n2fWuyjYt52TmiaBVlq1WuRzGVdiuW9R7Uu22b+Qd/9vYBQCIiIfLWvSu2l1lC4RuCBEAWCkwqeoZFs9OC4fdy+46n+Uwfg1RQMMt60Nz6fi+QXibWnc02MRawUVGObnVAyvPH2VnILl3w8cUX11d+W09CW4YBgAIIXR0aMDXhVV8qKBoFwz7bNn3qEWhyijsViVby3itUfURUaRkHu3rpdXZWuNaLV45tZkZQDvj/pDF4b6D0nNQORwuvoVGZZtYnC+aE/up4pKUrBZEDBykyViuz6rbdJvh+9Ue5/eZdvVyBBATEZ3mmBJ0fUeeEsmGsiM6KzMRSlM05tKg8QCv6koWi0VmgOVozbwKrrrv9QWtMZGw1NZaKREs57drk2qgElFI07hdXyWzYbE8PV4pUAJMYgoGnDFLVRWprq9Qzl5dY8XyA6pm2JNA1igomYRM2m+05MJ6+/thGLybnM/qbffSXyDtTcfeEWdr70HDQyt+131V3fNJtydYqTeti283r6XJFhQHz/JfaxaQH60FUp91wAdtvxODgzSEEKd01B3P1/O733nva9965/UHb223l90wvPv+e//wN3/j0QcfPXrx5Ne/9auvXTx///333/iFL773+JPnjy6+9o2vgvLPfvjkjfv3ry9vCIjYiMjCAijdbDY3Vwr99gc/ff/r77zz2r0TM5iihb5bwDKlGDpkQAAyRQQl89kI5CPcALCmajBTCBRjxOz8JmYsvQRYTNEAc9cQdgZPbiASMgZRhQRRozOVoMnI+g4XPS+HMAoNYEOPsBi2SFNMUeZpmk5Xy/V6bah37989P1+LpZ56ImPmWWZEpFzCyEQgZmoglkyMNOcnt3h1e5dbNlERrKVP3RfMO17MJgkjCKKmKCJW2ZAZxiheEqCqzObGtpmpmehOcVmtVqG/ij6lMjfL3dM//ssPzM3EStfQPYtoT104QOzbAqNl6DtugO2z9vxVrTyr/AiLTUtlelVLIC3HqCy7FWb1QdZINWvUi5oZaPtir92j1iMgsucqguJ1lqbJQX01oozXpW1PiXpFBNQYnZZBBDabcbudiMhHb1YgcJnv7JYJFJ2YiNS8TpRaOZ6NLAREgtxiAXLoCMByY6YMhKpVw74h3Z5QPfT70N4ViBamt9sU94nUDuwt82xRIj9FXxGGrQ86IDrVnY+jvLTlN1N3AAERghEhAhFTUJ4AFHw6KOTuIwAUmAEIwEzJlN3XnGQnoV1L2NXKvsqH2BJ4izbta97+/dXfNiBq6ctxr
8U3h4NrhO21FcoHEP6so6iJhyqOL/02mTuxXG/Ws6ST4zOwWmTF5g0VgADMdc4qsmOahn5ZAILX6+3Hj+LJwIsvvz70HTPO82wSAYkCdQgd9wDgcz4kN9gQAAjEaKBJkL3AVNVs1qkbiEC46/rQaSepBGQ8J999WIjIFLxeqQ9ejQGIgQJZWFxPVx8/Pr/azApModPSogxVzCzFqBJTSiAJEb2lsm/ivXv3mPmjjz7y2WUHnLYVEC1soVaith4WRFBSK9Fy2kl5NFBV91EQUbK9+x9gy+0FtGz8YDdvU+XBN1Y7eSDO81wXv1wu796964MiPHlBSz3SARrXQlAsNThVGa1kRaVb43a7PT4+hkbjOsDtemG9SR5mWRiGK/pQBFYrRxoGXnuDGwBQCUqr1g0qPNB26d9YBDrt14DVLb4dZSqwNuKcn+JYjUWdhJI9x5wbvSAiKrod6F3Bpmlyg9Bv1g7UqS9IJfpXUa5995YJICIBOss0M1BDA/IxBGpoYP6NI5UWXt8kzrTY2+KeVRNur/x+T19qjat2X1ruWjNNbKfo7vHeerRAtkPP405+dV1Xt8ZKj3cRaaVm3T4oGcVYDNeqfr+SdqwGM2nvrR3bHVVS0gl1s57Xx/FOtxrnTYPVWTCbmSn4f4iEQEW+YQivdnxXA6oqlvknO2Q4lR+2pqPVensCRDQFoJ0NbGYirU4FVeyGt99+7fwZX92sk6Y4eScBimpzFB8Yp0hmapZcFEnpC1dpEorh1LLISts97zUYLDsn9QUQc1dmJ07f49VqtVzmtjEebPUitBpnqFirtnPktzhawXobfI4HlXlVtlUZQevCaVG/3TbaDynUDy0E6sl1m21fmWhpo67zgDDaXQeAOU3jdlyerhj7p+8+P+mPH6zung13795d/eE/+d2r7fXf/vS7v/Dldy7XN0npK1/9xemMX//im2endzcvNsswBMGr5+ujJR0f3d1up5dXG0AdhoEXKwAbbXj3g6dTol/9xlffvHcyyUSIoe+38XroeqJgCiJiyYnM1F0LBkTkfcCVFREDseJuoDC6IwcAkTQXrBPnXqAComDuOyBCQjYDtETJhRAqI1CgxcCr5TCpzGZdTMDdFFXTLBpVNQxdlDTP8/3X7v785x9PaVoulmHINmopACPvuI5et2pKBgOG1jvgOMlNr6AWf8xsHMcaBG6zU8zmg/3K6GRYWvm5wpqcifR9n8RMU+gUEdXUEorM3OVCuErYWjA2qZmVZFdnrLRvgf2XHVZKobDUnfvrqJiaQMN96gm30RUaedneuS1Sr6RaaaFSQaWdknN+eJ9qHUFT/duST0tH7WLqI6yoOxWSBymO7WbVC9s0gdsGfz1tB5M8fTJAdjnXMjJUM0JiKlkPqiLaNyMBXd9CJhERUx+InldoCghAHqvYhz8h7BvDUEM2TfVda4e3zBn3/ce8PwcJd4bZ4f5WMjngcgeMq/3XzNR2vLGtiMDGx0xNM4ZixuxxS2sazLabgm0r7dxdCX2YRSA2Q1A24yjZoEwxL9vlyIE12KLfAarXb9qt/593vPKe8CrPy+fcARrFpT3/YJGff6t6OTaa39XlzWY9Pnz4sO97r5xkNkQyBUTwIdp+lec+okYmkMzhCbi/3G7f++TJvbPjxXCn64YEyVIMARkNIRENRITkyEPV+6CqHnTyRUHGrmQSfAIIAQYkJXQXTEFvRMSOA4fgIyP7rrMEAmYdEy0mCefX20/PL0cB4w5DZ4Z9oL7v0zSayhxn0yQiDDveYmZd1927d2+9Xn/yySctbiO9ImJ/sIktrcVaxsxEGlTVUAEISv5tHbeMiIA+4ervOSqd/r2n2S11onWo+UGl1sOJi5lPTk7u3r3rOlKd9QIARDlQ4w0emTkl9d5ODU3l5LoKjRCC50Fst9ujoyMraeS2r/YcgLH93otIVT23K8PTH1rFdK0LqHfOHkb2u6GncBAR1lF35i7dnUt0p5HuM3xqIjAHT7nNDFEb6eYyqMvpms5wvJXoOI7TNLllaGar1UpVvaUTl+44zEy26xeCjbxu5V1FTiIC3D26/bVVb6jxZVcG3lrXHqaDfRHglwzdrsNWy7orHCob97s1TgRTVUk7qXSbgnA/M6ikgL7C1VsPb5ripkF7nwOztioPt/famuT5unHVHkuaPGIApZANgJAsMRMTgUFSUrlej0frcbVaaZp3oPC39Aw1TVDMUQDwYRkGFPbmZO60ppqkBk1DHdp3v7aHo8eBTc7M6iwF96Rz+7hqyHiH4XDneNnRg+OTo6vrm8v1BozUQJMqAhMZIQJKEeF1v7XE1toH1G1u3zA1ufJevAAAZi6/d1tVLz8+Pl4sFqvVahgGxOxNUdW+7w+g4FfJfmVVvQ80ZZeVMqGZtmnWVNA2CbUtjGDfX9KSB5cp7e1ibN99cvDcVkWzRilpqaJFU9sXPNk2FmVmjdphv7Ljpx+8OFl8+gu/9qUYrVsuvvd33//Tv/7zr//SNxb3jp/9/ONf+sVf/s7jHz49f/pPf+8fv//D9+er7e/9wW/96X/4izjinbunKuENpJjg8vL66uVlmuL1deq5//DRxZ07Fw8f3B+YoqwHhGHoGJEIjRC0zo5CFUVSImIkNCXArnN9FxZ9V9dsZjGqmgGCMaoaohFBz2SWrbOgkdAAjQwDsXEGfh9CUmOjcdGvVjqZjCD9bGOMJurl5uToznC1Wb927+HqhNI2CqSeB63oZwjZOYZA6OWLZUrsHmfH4jStqN4SaotgLZ5XzG9Zj6O6lckZBCgi8zzLPDkBdV3HPHAIBjKnpKoM2oERURh66gIAeEclZoaYVBXyjDQ3CujvUQpedRQOmLt37HFGyY2e29e8fbTMpQqAIgB3K2qdtZVVHdzzgA/Wf1t5Q02BhKcU4i3nqLf3hEaZaMVVuwZVdR/BgTSqWw9N7YQV52L7xL0XcXi53xmw9oBVBUAwNCnjbhW88/5YdZcQwmaz8XLwVmOonHqXfFPBRVmdqTa27YP0oPCASje5Fo2x0Sbrq1mjRSHuxjG1ljlkpdDTxry6nRChpggBkBULFhEBjGhv1FJdW5VYB2KsdbTVl3KFrN6hMlstDf0AgJmQSrKrkYh/0CRg6v0LLKa9PW1Vkxb9WjxsWfH/EodHn6qKVll921emBZ3u25O3ZSI0+ImI1RNse4IY4FXim5AuL6+fP794/fU3j49Pnz9/bgaqFroOraA4MiIgGpCZ2QJ6IkKISXSWxEQphRfX2/c+eTYEZr7TU08AAJZSBLBRytxwrg6Pko3myO+qByARh+Djzs1E1TdCVEBLxD53JgRiEElJVYBZlCElU+yMhudX60+evDy/mUboBImMwJAwMGDU3M/OCsru9lrl5OTk/v37T58+ffbsWeUVLWUd0FFVoA+4SkrCzD56E8BHB6HruABoat5NGwHVBJTM9rwwrRrQ7tQBor7yknpVi8zaVPdVqndeJCI+WeHOnTunp6dQNOBpmszMQzHlWu8f3qc0juNYdJscdqjY6mtg5sViMY7jPGU7sOI27YePXglkgNwZ
229cAwlVOvvJzvf8/pXpAeTUgUZPw8I1s5VZyaglq0og0PCHyqnaRZYkX1+LS/CcblN/QiJAFUnTPG5j8mDpdrt19wcRuR3oybrej5rKgD6ZY7vXWirfauZLNQ79rZOlljfWpVYp3yq3la/Wcw4cfC0m19tWFlpP8ChxPafVKIhAxHUA8QG/pru9Ju/eDZqDabDLQKxLVcsVARVnYN+t2exRxjdmdmSrQKtCvL5+/dC+ePuvH74XzGiEUCPAmKGH4C4HmqNttpIS9YHr8igLZ3TFL1t04JEncQtIdY8tt/RewxKtMe89z17J9qH0GtW2XhcNcXfbqjZALuLbc2eISDg7XZ2eHt+P8uL6+vRmG4Ev19vp6bmJl/2Aj44Xy8QzhA5LKXkL0DaHu6XwHLzOB1a5RrtUB8EmgHt8fFzrX93ud33CG6/LfhGtv8se77jFLg/4Y1WMWo2zJXgtqXrtg16FfHu3bZ91AIF6czwQ7Y0xCY3f4uDLeqtMCQEJep0BlVdh2FxdP3r/8XAU3vqFe/+3/+f/Y3mMy3snf/6d//S//pf/m3d+6RuPnj370Q++9/L5k5//9Kdf/+ovnH/86PzRi299+xs//MG7F5sX9x++9fYXvrrdpKvv/HDcyqo/GkIHNN+M1x8+evHGgxfvvHWHkOY0LUIwTQRJDTz3gAlMAcwQLQRCtBgns8A8ELGIDymCMvlPAXKzNdMcVWNAtQRqhEhMhIOCgbsWDRFRkJJKb0CiaLTobLHUhUoXE3OS9WiKISCqilhK6fhsFSVasJP7xxefXs1pChxMkNmnZil6jNB3gRAVDHLhspP9bXxud9CPrsvhQcfMdvexcUNUhojg3FOZgYiJGACjJBvHELyNFpth/S8mcXHLnKfr+hiJEALRBGAGWoqdcsbRK48DllGPWyx+x2Qz6hq1ZmbFwBar2xu2Dh1VRcaW7VY0Lu+yh9UAHht+RYJuBXv75yvfq11VvVUrDCpLbTcUGkLef8SOhPmzC+TqVZabu0B7OQCqQtdln64k17rQzAJ3FRRdNzDHYVg6t1dVvxvRrsOWxsaFBAa2tyOaw5J7KNrK48pVWnZdoUS3MmnLzal5wb1sIq8oq44e/9WVxQapWia8w4fbQv3gkgM3f/vo1qGAxaqH4pjLd5Bi9QRKST1eGxigdEcTI0S0XU7XgRb4CtQ6eJ0DNLt9fBbdHSDq7t330ekAIO2++Afa3+v6ayPp9x53sJ6K/XWXWwUREW9ubs7Pz7/5zW8eHR2/eHGBCHOMxB2A8zpG9GwHRGQiJFTT3JgrzmIETCECfPz0RRcIAF6/e7TqujhvCGS1XI4xpxx7sM9MCRAZqHgHQgiSsgvMe8x4kCog1V67kodImXm741h2lHiOWw59lJSUZ5FPnr54//Gz9ayRSSgA5OZP8zxO09gR73TmxmBGxAcPHty7d++DDz64urpyg+c2m9L98PJtfL6NGwXOjIimoqrUjPpUVYNDV1cljdvo8Qp0atD19pfYxOHd3qgcwL/v+361Wp2cnHj1IDZ1tvOcah8pN7gWi9U0xWIlHkbvzUwk++92LMh8bmEs6sErckzKta7REhEBuhG7F95p4e8OlMoioG2OdStSVNXIPGl599AdtCpHPSAr3K/fQ0TIYwzMTHxehec5USlpExEfdRNjnOdxs509C1fKVANvkOGLdIOw7pc2kbHq/Kpk0lqnLW5oOayRYu6F8Tu76uLVm4f2wz7qtlyo5HekCpnKOqrC/Epsr3GgsrydhCqn+T4aUaj6OTRUVlfYkptlv16rUey5HX2RKeX5TNB02K7wpJIOVh2gLbEgIoeQsUAFA/Yh1CKLXH0DnZFIStOsc8JF510M3TFsAEDQLDiZWRJ3QoHTV2zRtS6g2sC4b2swviITql12a0OamWXa2St/0FJe2HCJbO+EabMxwmmWNE/L5eLu0Qn1m/OLlxbFM8iYOfRDAPDaa2tqE1sPStUPDvgCYIBdC4Sc3Y8YBLylLyCRZ057NW2MsyFOMVYPLhERc/adu0eayBC90ICbrlmwL0Ftx+x3KoU0vUDqlxX09S1KiUIuqW+zVQ82CfePFibtetpCz5az1Fu1alx98ZYA/JJZJxMI2EsUTTp0Cxn13Z/+/OT14WjZffz08cPX737/3R/r/8Df/pVvnz+9gHnGWT798IPf+da3v/H1Lz978oh4+Nq33vmPf/aX7//w0Xd/8hPSYbwYT/qzk+FkfXkDx2QwPDm/+PmHHz+4M9xb0jxHNuwQjAwUABUJfA6VKXqfTFVNMSazvusMUVLyULgrWabqrL0LQZJ4/N1A0rzrvuUeSjEFMwJSNiIioZBU0AARmIg7Cozk2acuGLySAeY0LZf3IcBom9Xp8uL8akpzjwMBdqGXqNkNbYDeJBdBUQGwFkh4zw/fCyzF0C3+FMaXd1lKayI/UhLaryrErAezikYVAmHOXdocH4JlREomSZKBIZNZiimJatd12TFNRGah73hiIjQkBczOKSLYr6Ft2fErv/dFtq/mDY7bZXuxUO3udYDwB3KopXcRCVyGOzceTcQD0fvqrLl28VUJa3N1EA+1pZbYqyypS8IiKdtLqGQE1IW15mKTH7WXOVy/b9/CzLxGC/YKTsByJ6HOm3OkNGPRd8WSSQ4Dhr4TUyA0BCB0fxlh7VsrkqTHDhEVc8XH3vveymiwxjuOTcfOA0WtZU0tYOsJtovH7l1YzbDqKXNVoAKq3VMrQacKw+qDoybiVzeo9c0d4Eb9FRpB6GvrApetT2YoIghsPokOgADNuEzvRBfhLSY0+7iXg1ef1b7L/xKH9xOC/cxqKOpXe2R4Nim+7VLbb1oaaTB/J5Ios6bDxGAimuf56up6uVw5P4SSGioilptkAnPHzCFk+KSUEKnvwjSnOVkfiLC72Gy7Z5fDMCxXA3MXZ2GYl8tlQTcmQkADYEB3V5fSPsQU40FAWFU9qmYgCLz3moSmGlWIqO/6eHNl1M2Ko9nG9PHL9eOX660RUg8QkIN78pLM0zRRPxgwwI4dofcyAzg7Ozs+Pr68vJym6fj4WETc89Ia8O1RcfsAYZDy3E9RVZXcO8zfCwhAskaD5Dmk9dUOHnF7l3eb2uBqe7Rf1jtQaa86DINnJ3rWYh0lX5uLuvlXdJIdI2XKk5m9y8M8z42fYW/LtOnnsdN2FF5pP1QbRsrAUiw+C8sv6IYNMqPLfa8dKzYqeOVd4YGICKoqsTRDVgNFFfHM5IKEe50Fzcx1VGZmYlXLjc/zPCz0em0EBRMwBUCfulS3bAelMr3Z026hzOWq0PAeMyGEo6Oj1Wq1Xq/9y77va1qcmS1LV8m6fSEEd8C1KGHFQqufdT/eUHVLKZOi6y7Udz9QAypjhDbXNII3RAEjIgrcmycWAROSgamXGxupWkpK3a73j98tp9Sgu8p3SkIVKBWX8ovYni8GGkmUUvKIelG99ppuOLRbsHNp9VnrxRzDa15r+8p1DSlJSpEIBqojQCilJBzU1FAIwcC2Yxon6WnCkvPgfnWCJlPJJ4AggXlzQFTLBnatVfG
jdQD5vviamff6CVVoHPgp6q2kaTbTijP/XkvAmchCIDMMBoKGZvLw4cOXN9uLy8suz3yPiGRAhlV4I2LWRNuFQqM33LZ0valAwTwSyWPEur6b51kkDsPgJIGIKeVREzWLxvbrTMxa3u06VlZcrIkyQxOUqFXRVCZYtKLXr2oHB7XwqkpV1TXrSqgZbIqN4w2Lilkk3x5uQbHg2/GmLQ/1O9QxPgckCgDQGUJxcqulSQEjYPzRD378W6/9GjA9eXH+1jtf/ODZR4//5Pxf/3f/5htfuP+3f/2dP/hHv9sF2Gyv/82/+e/+X//2j+Vm/N0/+r1//z/9+eOPbh6edtyHwDiv13ePjq5ojjZ1wOcXL4fFSm3LIahGDIwGSWKchIF50XMgI/EO3e5tctojouVy6bCVMrKs2tVd6c9BRMR5WKdDz/vFoRv8kGFFYQio0VARxjQmIAidAo4xKQVQYqTlEs1siuPJydnzq+df/oUvnT99LlcqBCAGCF3XialZAgSiwGw585AgUKg8FEq+Qeswq5vefnYvkTNW3/e5zKXMlm0xF2OcaVekgQDE3EHpZTyNc4ruX89bnNLWkT+lVGVzLk4mRCZJmtRCN1DglBLBLge9Igk2NbHYqOD+TeUvAOBTPQGo4l7DvlHFOfbO8VHdFtCIcCoVlV3XuWes/tReUmFSn8LMPpCuNXUQsXadrbKNdllA+/pWYXzeFAGLL7MGjrA52oXV6B/ut2AOobOdWgYidWFEtBvqVSUccVlMyUZCRCQ21ShJTNUUiN2HhUwkgIBMDADzlAL365utmSEwInvPVdMSNwUGppq/wqUCXkRKRwCzfb4UQl93ueVLLW5UUFTq871T1WmaELHvQyVeP8fhUN+6stD6TS070cbvaGZJdinxre3hj6sZH3VJHj3wM51vV9WqxfDKsaOPDcTcfQKBzWye42KxADEgSlGInUXLsDimmzFJxqj2niLicZsWqdonHohhvZVv2V548BlKdpY1ecuO59R4aqtlWLesHgeC70DSVQg3YhELeHdzdKCGmhs3TUt6qmoGl5eXq9Xq/oMHP/7JTwBoGIYCHLecdjElX0PXdWKgCs76opiArvrVs5c3aR4Z4Re//PpAQUXGGHviHnf8J3Ts7yEpd1+cpmmzvYmzLBYLBpymyWk/zdFlIoLqnJi6YRhC32me64umME7zHNPAspkEV0dXV/Gjpy9GZeSB+sG0jI+TBJpyoiPgYujGcewIiajrwnq9Pjlavv76633f/+QnP6mQPFqu5nmW/QhGK74PvvF/hy5sNpukcHLn7MWLl8Ni4ZV4PmCAiBFANSVTNkRCKw4+Lb6eihUtQtYjNV0VWm6At3zWFZc827OaK85YnNY2m83Dhw+Pjo6cMEWk6wbE0es8RWSeUgiuf4OILJdLAPCYT0vOm81mHEfn/DEm1eS4cXV1dXZ2tlwciUjfZ6L2NcQYfdyOC1YPmvndArvLkgC8XEKrOlfZkesYdSOqvutJ7CI6z7GW8HnIKITgNbEO5NJcZ2zpouoGrhVU6bnjb2kXOcjgLUSRB8ebmZmb3FXDxGIxLpdLN8t91MQwDM4DicgtQ9lvY+HcIxNCUVypcdFq2nXBaJ8FAA7banY6zK1Yp62shJIFWnGvyu6+0Z+hZOpWjaLyLt+Rvu+T5YGNAEwEdfSFmZXgnnepNb9bh3uxL0REQIdJXU8rgNIcs54/R/Nqo/xeCGYqEqd5HicfzGuis0oNCULp6dg696sSm1VBBZ+2ayb+skTEtAglUieGSZQZ52QvLq5Wgx6vjkIIJooEHrbyR3Rdl0ARmbtgiqpGXdDyrPLKmr0OYJ4UXF8WEbqOU4q+TigtAAGBAvp8ZkREBiJi2DWhrHzpQBC0MhdLhmogMDUQTaL5WzVwXVA96olNuE9VsS4d6q2heltLNyf3dgOCWwK2M5bYzAA1RmXmxaJfLBaLxYAI/uI1a6iVhQevUfld+82BwG6vqvpQZZo7+ilkX03BSl3VBG3vjI0SUAX5wWrrh1YPu71UbALx9YT6uKpdtQRvZsS+ZjUTwkAGhmhGVy9vfvzDv/vyN9+MHF5cXS6OVzc327/+yXf/22//7pff/jImffTRx19958vR5v/dv/5v//v/4Y/f+/T8N//Rt/+KfvDs44u3Th/yZDjF64vn6f5JGHqTNG7ncZyPVmE5rDpIpBEBxAtLgTwtQgP0PTMTgDEjc0dEiOYhEURkdhj6XFFkDj6GIcO5pBOb2TiONashis8nDF3X2ayTzlNMoxItlgAwSRxTQmJENvNuBIYqyTRqpCVpZ8uzo+vNNRCQoTeA3GELIYAPg2ZElPmwNqlV+ltcwiajuJWvdb+o8XrueGtxgsl+0bOTelQxQiZmylwHxL3UoGBREgm7vDk6OtrOE00RCNBUNJoPVaPDSMIrj4aCdjnuuKezktmuBX8jG/ZCee0J0DiAoDCdVo6276uN6w4b/Yn2feoH5AnF+effu5Sy5rh9Ce5rQq+kTdg3Ttp1touBRrGr3cmsybFsl9HGNPKv7lzW3DYCb2mN7TLqNy0AzSzF/KeqGZlHR9Q8SZVzvkXt3sTkE29xX6Za0Wlak8NPc52g8sa6/hakRAS4yzht0eYA4W9jHeIenNvXrFpFa1wRBTOPSiFRQMzZtg3GUql88Ds7MdawNqKnMcZkyMxoJOzGjIF5iqnsQaDlxrfxBF8lWT7jTR3mFfg75PE/ITeM3bWYM0OxPZFU9lpxf6RY+6AW7dufWm9sJbeD0+rn25fXm7y8vNyO49nZGRFR8Zeh81GzipuY5ZHVqWAAAkoCJoazwmJ5OkF679OnJvMXX7/74HQB3Hs+uprHHpPOwozMLCWLqZrNRBRVxNxlBFJpAgAB+o4MIcY4pShJDZ2UCEJ/MyUcVpsE73786NnLG8EuAXUQvAmkSVJLoAAGyczHhVWEdHf3YrF4++23t9vtixcvoKTbOaWUvdvbd2sy6Oqm+7G+fHF0egJGmiKBSJpnmfphWa7C3JwN1BCD7gJWB7tzEDFut8C/aVn65+BnbQ6MJRvCSkTazBaLxfHxcd/3hzkjtxhp7YTZdb3u58u0TLj4Rs0NztrGiYiYOgAoMVeoTKyyKTzUwfY8nk30co//V87pG+rvKyJ1smt115oZs1abUJpZdvVF6vkHpGS7XIwMq+oR42JQ1ae0e5QD7wCI6JlxnkHdBl2rL8DMAu4cSdBwYH87uyUBuzI2g9DDpFm7AE/WAPQMRq8CRoC0o6pDPlDX2QopveUQaaVGidZSMVoOMdYvxWbEt9cNOhqYmfdgg8Zf6ZdVZ2g2jspzKw44+XNp7eENsSonbPY0G6UHmUeVc7bkIyJq3Ob8UUZYRcz6ABiY94ZHSoCmrIaioCpshKbqQREkRlL3aSACoqJReZYvRppiey6NmqDx9WDjAqjA8fPbfODKP6mpd6s3qXADNCRvOeu7a4AWENEn88TZV0PZkVLGBrq5CmoAhw1UbrOM+n0D5ra4ZS8U1vfh+Ph4uVw4NYpGNQm0sMYBUJ948KxbeGYVZJ+F3C
0JUZM2Bvv89wBFDki6/dC+b/2+7oE14W9sjDpq+gccWBHWeP3dZ1O1wypsXENSEACBrK6DJtN1+vAnH95/eOfe23fPr5/BMcGSv/fzH749d7/0jV989NHHy9B/7atf/ejjj3/8kx/94T/7w+s//v++3Hz6L/7VH/37//f/9OKD87dP37Q53T0+eQo29AvbbLfztNlsHiyPOHSdmWjOFkbE3NiCHS+luuu85KNyYXfaQRlm7TBP0tjJutsv5o48TwMhoABlF9p6NiGCToawUtk++vTT9x49m2ZMYkqg4KIIspKMgAva6nhy7/ji8WXUSBBMUqi4ATkpiJkBxL0sBzjTmnwVXa0YhDHupRzXvasM6wCFsFXymvAyNxNRK0DMTBEUIRmgJJco7s0dVsthM4xTFK+x3ifAzzoqYdZ1+gZCI4krMqvuRdrL4nfJBhXVKyZXHt1CDxulBPeFev2zEiMj1xXWcw4WX8kwpcTct2CvF7aKSEtfVdt7JXwOGMvtM+v9PcHGSgSm+q1ExBoV3G+UXXyIOx684xXVPNjDkwNDuiwMPcJWF4aIhrl2HBtR7eMQiUjhMBWzfZGqVezUstKsz5fBTePcPTTAV4OrrhkbngyNHXXwfXtJPc2P8qw9e4b27aKW9MrNm27gwN6VyhBSUkQTsOCSm7O/su97lbnVsZoFv8IAe+Vxm/wPfn3l97cvN8upCrZvELavebCkA2dE+1MltJaKvbmONfq0mUGG8G27F7nj8/Pz6+vrt956q+97AIhRENHIcmceSxXmiIiWArl5gABcLUYfMz/N9vj5VZxHBB2GN45OlghmhOA9pn2GbW7CUcrNVYmoC50ryoiITIhIqtYTOlEpElHUqAnmFFUMObDrq9Stx3h8enL5Yv2zDx+9uBmhPxajnFisHiGs46FfYXUz87179958882Li4uLiwtvYwgAPi9U9FDHaPduB5ayR6tlWPVhuxklwWoxIIdxO3WB4ixOyKpWhh9qIrCmzcMrcax9et3o9szKT/AzdKEDDoxNtt5isTg5OeGSxeM9Fa3mfyYLbNa0/IXi0moynnZeiarOqrrdlVrUzSHi/YxlZ61U0lsyMoNTa6hMw9+iiatYjYZVHU+bhFUf8FANYN2L/iUAcg8UAHh6cBXNUIzPaZqgKbgAHxLYdZy1oJ0i4U/38GAW1I37qc4X8QCaR+pqIpVrUM6TETGEwMUhW0HXisUW2aAoiu22tiZ65ectPhxw6VYkQSM16p+VZ7wS59vHwT6Pah90ew215ZW7+K0ZFCGapMx4qBKhPkjKENqKGO0Tq1mORdgBoJvcsclLh9LVs71Vxiv11jCAyAQ+AVrNTC2pMoABmqolgCg2J00KYowK6m2ZrS4jhqDJ1BSRQ4U0h12GWisWWzuiQuxgs1rAViRst6Y98wBL1aRO/KroZGbZ3GQKIpIExHSeFYAQea/ighD92fvGWCuzb4soMxOVopMlr0bw2qTVauGxwRACoMp++01psjH9tu3S6+pVFWDP0qsrqbBoDeWWaCtTqKh8sCX+b2u1Y9Okq4V4++KhzGevCVd1d28T5IFG0t6w3aT6r6oyEIAmM4FoIERkahC16/t5LT/7znu/eXb3eHF3mufhaHU9rv/jX//nv/zed7719W/8N//kn37ywYdvv/12/+LCQEzTsKIo2298++s/ET1//PTh6b0X189pcQ9BDKwjt1hsThORkSEZsicjEAETdYEBJc3VrxdCR0RmydOjmbsQOn/RlJI7/gFAAfIQXgJQAjAAXK0WkqmRFouBmEVkjnETQ39818btp588+enHn/7k/U/PLzZdf2TaIQsEzIUkRBgYGSToaPPq7gn2mMbUgYBiMkDLujIjARpo9hdXgLfOgooSB4hNJZm5Eu1tKj2ggvb+FQ2KSbbLvlP1JlQIxTOkXsgMZmazpBWHYRiWS0m2nqJ5QKg2/zgg/tti4zYrsVIRY3sHwn77dUDAfcXilfzIinrdGrfQCJh6WovP7Yfbd9YmcFTP9FTSVoCVx8HBfnFTQd4+CItP7ja4PkeStawWi+FU8afmF9Q7+KZio5mVb/ZWUnlC5TMHcIN9B21tSk+lEbmZJRWTQ4dofUprOlbzuD665vBgY5RWuXh7o1WqoQ4FUACepFSKQtwaBtceGkvyYC/qe9Vficj39/bWtKTaIoz/YYoArniBqQKQ6xCoaGYKxuXkruviLLB/vHJ5LbZ8zvFKem9v225ofaN6Qmvg3b7qYDF/70pu3/9gee1tzXa19/X1+75/cfH88urlF77whcVisd1uAZT7XkQAFfJACEFEMQQDnZMymFnPgRCRkJEUCJBnL/kPi/UcP3z8Yrlc3rn7INioxsgUwmAEvgYxpcCekm0AHMJyMQT20FPJbiVDQyIyMhWZUlQwVVDHMUQFQKAIGJYLWBydXz19frWxMCgSAKeUTJNKtKJoorfHoaLG5Cw+JqKHDx8eHx9/8MEH8zwvFovqvyci0EPk8SNPNtrHCjNbkW6unke1vlsRmCQjBJkjAGH2t6MaIICZ5/i92qvYOtqwjVZ9rvQ5QA8szUhaTPAblinzq+Pj4+pHizEiZsflPM9pSG6lH4TBs64p4N2AEIujujmISPVwkpOZ5SrBwhCqot/yZCjuM2ZOlto3gsJpa4CuklvVwao+2VpKXuRSQVpvqKWqSvdjiZ5vWaFXI5k+zaIeWZaXMsjMRRu9ggrr9rxNLzJ36xEba0dLXYPJYUCiRYMdiHba+GGqsP952xRsj/ae9cwDw9Lv00Ks7hTs87d6LQAopbJyyeUUFLCkszr0sPZVQlXNQq3uV82vuf2+FeYlQJ3nT1S89fxPKLWXIQTE3BakvY+WGEZFgAoBSYqEYOg1HxlcKkABQDOeIxqCmCZ1G8oIgnrAwn/OxYMCat5lFAzUVARrghfuW7O4b522Jswr+YzsN92pqOvzbCsm5EegTVNqfegA6tsYgBhAmXlSScnirOMYfb6cARSb0JsaAAO27LBFRHlVFyAAkJzCZL7bBhI49H04PT31fMIks5mpJkRjxva1D1C/hQLuq5gHNHPAFlu3N5fiomr+ObpDcSntcaLymgeU0H5/gEaVPCpXrfd/JRFKU2CNjemo+86/SiEDdRHEU3oN0XxOmTBPtBqOn713+eGDj9/6lS+E4z5q5EVIp6urzcQnx6s7dz75sz//0//4//vDf/bPnz49Z0t/+Pu/+/2fvHsnnfzqP/yV7//ld99//+npgvu01Hle6Hzv/tmiH1BNoggDAigCgE+Zz++EhD31te2SlHplT9CvORgVGjFG8VJaxAAMYAZ5vqUBIaJR3XLKY4f4KMLw/qNP/+Q//+37T6+UUEO/HrXrsEOvXSNSoMBEpADWY5J0fHKyPD2KSUwQGUkJjUA1i88GpCG8onlJddEdIHnLWFsEE5HcHn0f8epVlW15gkS+nBCRDCxpMrFQhjs5ABGAGQEgmZLANE1d35+c8JTiejNidv/HNkLSHhUPP+snAKhFU7YXk+kqtppZNWBaGVCprPWh5H2DPappablyL
moLMnGvtVIVJzUFwhppCgA+h6qloHKrHRzaLbjNN6Bw20qwlQ9AcaTV8yuB10BBVTJ8nT5npboUnKe54VfChDtmcrAY/6Bq2fea9Z49D6h32HMGawjtu+f1I0AJ7ZoZqLVPqffhZlbY7tpbu+No4F7XumwzM6gNdT9Tq6jgOmCer5RhdCvppf55sDZuOta05/t+iXrppk86B1QQ1V2KKYCZePQJCAh2HdsPGCzWzJlbL9Ui0mf9WT/TrZBmPaFl5gWZ9favt+98+1YH52NxpEIRItjaDK862hU2T0Qi8kaj3/rWt46OV1dXV4rQMYq7psCdxd4ZxW05RDVMyccOIRJS8OwRM8MwcEcpbs6vN8fnF3cfXn/xyFhz3jIRmIkiqCAjJtUoyR1nHAIxa7IUxQVPLao3s5gSQAIgA2IOwASICmgqtDo5Wp1dTvrBp8+2ScPiaIwKACrRNFmK5lIA2ZCRmCmPCMOSLoiIPnfhxYsXqoqlK6aWARXWqB+fvzVmJvP8xusPfvlbv6ZG3/nejx4/OV8ujscYiYK6AEAk6j10qQpMr7inNSH9dscBwG6biPB567QSzfOryJOzRBBxsVicnp4eHR25seQYlVI2eFLUarTgLrkg35aIvG+O13o5LiFmqDLzdrt1/3K1uEoEeCcjWuSnJq6FSNUYSzFVNlUVOW0c/RUORRuxlFKMOV8UCkv06HcLWIftNE27PnkpbbfbaZpExGN3blf4yZwrddHMkwvQXd4aNSUvuSywz62giZk7ysaJhwf99XxhdSJaxhwRRNQSZjxwn72Sus2MiCtXafGwgui2DGptRV+PKyFVIrSeVrgVsagX2v7h3ysl08rMM6oeYHgrNQ7ejko7iYp4fs7O8INgO+sxY2BKiRmrOmqljUUIgTl4+nfdxFa47MjErHBUd/SY5ik1hCY+R8yTbgn3OPB2jssIjGAKSsiIAGJmnoYtoskUQBOod+nSNO2wvIkKprQbtvFK8Yf7GkVLQdikOtdSlz3MgV3qEqIVxgCqGmKMYhhFN2PcJhgnvbpZT9NkioYIvhlmPj6iVggecpmyfy22QdV6CbzSCVGZusWyXy6Xw3JQVZEokotYPKFUBYCQQtbIXwkscJwiBAOQz+TF9SXrl62ro6J7JUhpfIetm6pCuf1wG4n9z4M0USjOpMqP6n3aNRDt+QVhfzBlC+FgPaJCiCZJUYyQjciCbm2Fq6OUPv3J0+5k8davvXU1Pgun3VUwXob/+J2/+vYvf+tf/q/+1f/5//h/+r//X/6vR3fPfvef/OOH73zlk2fPPvjk49P7d775O798fvNnWxM4v8E43btz/Oabr5+sjgJLkEBgcZ4I88g/RUsaQYwBe8hB5urRISKvqPZvPOl5tynkFMTAhLlTBSPSNE3dYhh44QNOFHKWacLuRz957z//4HuPLq6gIwsrE0IAxEAYgBASZUWZzFATKpFCh8dnxy8uLivMmRjRDMS7OzGR6+zc9JW1xiqoqjPshzKqB64lRVVF3IvM4G1JbIJou5geZc+n2W4WnCOpdyJGIuacUGNm8zwPi4U3471Zb6vLk2CHoi3Nv4ISb/3pzB/2M8cK0hUKuqXRVo5jr0rtsyYzTfcn07xyMQ7/KuCrYdP3/YGHzJ8e414b8XqEcNgx8mCbDt6iXVW1DXYvfwtiuh+u3L2LKPrwD7VcoWEAaoGKse3t2KgU7zURQts3SA6OqvxVPVXyiItmWB8C7Bciqn3mDN/ikji06lt84NzrTOveFZabb8v7RY+38dw/UJMYX89pH6qvikBWk6ZdG+yz33ZbiQiUikRCM1VxtITSYSKHXAzUq+FbadIiDBZb95VG2t97tFvZ7u/BaS24LEuf3Qu273v7/u1PeEs21Z2qaFNe7TAcdHtVdf1mpprmeX7y5Mlv//Zv379//8MPP6RQa718rdmV5feh0KHXPsdkoAgMLEhmBN0wmNkcEwp1Fl7cTD//6NEbX3vAzMCUVEmVGbkLgXCeZ9AUY0wqfZbRAsWhYmae01dfWX3rvPMjkCmICQAdHd3Rrn/v3Xff+/jTWZEM56QdoYqQJjCl0rfBiAEZUaUZraZgRHTnzp3j4+Nnz56llBTIzEIISBxjBHpFjwMsEQ9oAvK+FyuGf/jbv/Gv//f/h/PnF1dXV8+fv1gu+qhZj0LE0qYyqM6wj/wtCVQ+2dqE0PCN25v7yr2uDBaL7KvLPjo6Ojo6cveuXxtCUM2RhKqeMncF/Qgx+9DBCABijB5pgVJDQaUkrJaN3KYIaPjPAYvIJii2fqVcyipNXZ+qDsNgZiLqI0ncMJjnWQRLlM81Das1inUBVHKczMwRYbudp2nabrfznDel67jvwzB01cGN6I33c3hqnuesxMdUnZ5aIoSVyXR5rh23TFhLCxYPV1RsRMQWFW7jxgEXvS07DvxTLWK0PPk286nssUphKPLd9uX+wZYduP+SRCIy8+3LRUZmVLfSDTkAIAYCEs17XWUBkpmZz2ute7d7U9x1TbcmGuw9CmvNqjsUQghd11OpF63mvRv8vpjU5KBCrmkXU1RUEBVBVTYz0YgJkYyIjAREFSFEvrnerJbHIQQ0VDWvvMOC/yIiAIyMSByAQ0jj1hdfTVN7VbZai6stsdRtqvpSRZXayvGA9tXaBhmad7TgQ1CBpCnGOI0xASaDeUoxCpTmMRkRwKpEgH0VuUWsFnH9yxB2xhhz13XdcjmsVsuUEhQ7VVU9kZBLRe8BymIpQq1+iwPCeCXG1/W3/rD2nq1jqZ7Q+H5UX6W1WOMdaZmyXyv7bXPrVVA0nsqM/Fa3HT/12grG1rPVScdshpAsqQGAEoRgQWcl4qNwMo7jB3/3cbjf0xshpTRyPxjZPP+HP//Tl194/Pu/+3sf/vy9e2+89tZXf+F777/73e/+9aywgQ2dht/5b37n3/9//tNpx0zdw9fuv/2Ft/qO2XQIHaZkwESARMDgHjEF8Z5+2sz9bMur/C2qZesvq+imDhORmCHuRmajD9nypHnFFHU7x5++/9Fffe8H7z95OpyccL94cTPOokN3ZOoT/KRukpP3jKMFULPjs9MX9FJMDcEUicmM3QRjQEX0sHHLWFtS1CZCQk1qRItprTCuBsMBoTJjpe7dviMQYa3ybdlZqSdBIpI9DV4NYLnk1Wq1Wo03NzclhQleedhOzT3UJusHxyjEXd5aQUuC/dlBtymr/VB5maoa7uyx2yTQEleF8wE827doQfrK71vCqUe7KZXcWgbVwgf3NZL2Ju3j3PNdn1vVPkkTUgA0QAXbuVpb1QdLJo972Gru/kFhHjYHgJdtNw1psndu9yKqWnOPW85WrdwWLFKaqjW7fFgfgo3d3kLGzKBJJG43tC64Pu7gRaikPBzsGt7i8H55HTcCjTg8YLYHb3qAGA0LFTASU1IBBUME1DrIEYvP4jZqtQt+pSPgNkHV47PsSWvMeNhD3UPAtq98sKq6TbffGvaztRt8ridgC3+/Dg4p1K+V8/OnzPz6668bYghVSVUAzxsyAAOfqkydIapGVAUVREUzcXdb
BGMGdbqidVQu234RV9L5eV97w8lakF8CzCrJxS1QvTabwKXtJp0lh6sukjlqDiIifZRCHVpHxE1LNGDBgDcr/fqzhWyauO2/1+32zn9qwRZyO+2fXOmeTDvGW2Nx5wriB6CasfVOyiO97AQ+LZ0HOKAexrIbg61bIrqJOqoOht6mnWmw0/RvAmGUxoWlYJzyNj/jasqq06UMK8TEJcbZ5fFx0npRFrl3CuHbGptjVqUGFT05vtFejOCrIyRYVcXUh53upTwfB3KlpUCzdBt9zePAYaxYWds8/QazLZzk2yXdNn+8Pc5eHFoz5CzvldpzDzoToAYH7b5EQ3vcc/kjhpXozRZ3DVns2UwVm2DVY9fZrd62smm3X08t/f43mwYRz7qeEOA9KYyBR6nie0y6K7r43v5YD/F4SxoHQmKEIIl5eXP/7xj//JP/kn9+7de/78eRFxcz8CHAsd26L4ifincj1oy0/QbjM9wRpj2Cz8+PUns+ImZ5xUpXMYhrhad113c3Nzfn7++c9//r33fv7kyRMg4jHrRqmJXuwMuQlmgOw85jYjqk2elvRj7MDMMYYY4+5mr39idRDrf/TzsqlMQ41eBPkvvQBUrxBU6a23aXSCmU9PT9VU22632rEyl/KChIixI+HJgZ4zW4AFKquGWpKgxrka6qrCKmb6vteSuXHMq80aEQnj9fX1er1OKe33+zt37rz22mvr9Vp7kKZhHA8DAa66HgBAICB1IXYhAsswDspZXdf16369Xq9WKxV3hR8BOWWe93nWWZfSmCpI+76XWvFlvJzr1XUdq8INQZw8Vwmfy3nxUyl+cAXnXmpp4uJ+v1ecgMsIxWoybTab/X4/juN6vSYqh37jPINDAdbYqWXP6qtVToZ5SxvbINiZc57FDGB7NtcG9akeFJyLyTELqE4MLlpMJH6cEAJzyHlk14tE1RglEovvKWZUOPvtwzhUixJzzlpA6DN7/eys11TiPI6jNqEdhmHTr/q+OxwOInBxcfHw4cNnFy/feefdEEJpOjqms7MzRIQSy5WcC4+EWtZIBHqkQkpjF+M4jtbSlupRGbEeX6eqmulpoUTkYByzMFvXH1W9Ts/O3njjs59++ikJwMjr9Vq49/tp+VdkFXt2jbj1yPoBUuyiYjXGSISMoifhpMSa5k1EgvXUihhtFYq2zyCyF4QYi/0VQx/cUQuzQg5wO42SBblOM54TvOLlJSDPo9LGS27DmKIH+oEw6sl7UJTpWAonF61gYSFwVe4bDP4KIRD1pnA06pERrheszetsdkflsubQVyUJTdfBCbCps6WN4JHj0WVjY3Vgm1xrFAt25r7XS4go19pFvyimyOI8XMPFkkR0Cg2UnTWXoUQQgAgJAjOrVcamggEiEtMgOeMBO6ZtXo1P0wffefzv5c9/4w+/scOb959/vJL43/2//se78G//4Jv/2esnD37jG9/eHy5f3Fys1vF/+Qe/N4zpZ/LhH/yjf/DXf/K9Z+9dXg5ydn52c3MNQ7535+7VzSWD9igDFmYegTEQha7TSH/XdTdX18MhBQjI2MU1CQAyQABgcGdgJkmCGs23HtkYCDEjOPXCo8ghpFXIyHkQqFaEN/fo5e60VM8p2QDn2cV+fT0BNPlC6DLT7F0NffoP/t+/72WU419ndLtgvdmrvYZkDOV/dduAeCRApVtwBpXtl36mhrQmcwEql5ltD1V2NSM0KHKSahZd0fE9x/m32xSoFmn4GXmKMsmJziYhVwNs614HB3vQz7pZYvuwlGxm7zkZO7MSDT/i4rcm/arnYubNbZgCrK9VTVvQX/VgYgXZrQ5ILQyrT5dyJkTweh5UUs85W2KVfW/0Yhj22IPbObfxX3gjank1Q73izl/m8vDbesHCvDm6vn7ido/niIaw/eMNedvnJTy3XTlnZH727BkRPXjw4Cc/+UkBnmaVZg1IDTw4N0T/XteS3pob0MWuoTYrt4WLMQqzSB6GIRBuNptHjx6dn59/+umn4ziGbkV2wBcAqA9aP7qWBrrrLYm/0VUaOL1Ys88iFsyYB17mKesNzYND3VKINejy4pfqOdKIhBjsGC/1VxrHMdcwF3XkTuJejmzYZuemUcbXBp6qy47jmLqiRnd9rw+ebk/u3797enoKABp2NmPSBBHVZFF9qaYzdF23Xa272IW5e0hnBwsfHDMDUk3yJC4nYAMAMat2owIBiVS4CRGBkMBMbuScmcHMSL8EjSPDq3am0ItzoRJ2utI89y2GEBChJpHOMpVg0UdQXDNJe6na7Y1BaOtlA+oHE7+H3cFG9uai9wd5Giibl+q5OFMDRNDP1OvPRs+2jdpQ9kgu7YhG2z40pq3tZGAuz9FUIMJcSgQl50w9IEJKabfbb7fb8/Nz9TgMabxzvt5sNtf7A5Q2irka6ZMDTsqJ61Yol7Vyyk9B/0z1JEnzlXDJYgga1610RRCIEJCKd3Kz2RDR5eUlAAwpBcjoXMahustNV+FaZlyWHkZ0YVgQEhL0RU9CzMLAREBO9yAiwiAq5Whin+JHQFCYoyG6EeVKNBaYNkSYwiHVqypOE7I1s5ULs9LkKYpSbmPtsilz0pltnJMyNxeI5V0ye9wkglTfrTj9rNE2vBpkgQuZe/6Cq5yGhZz1Xxr8JulNCasun8kMtnGqnjepBYiTV9vJpYksDDCcd4dHmNQ4W0QPqmEL6obq4ddLnYh162MUC9UyoOEcABAQhJA6yQeBBDSGNa/h+vDsJ88/ffbp9u72K7/zqw8frMb9Hrruk4+fffri+e9/4w+ffvT9Jy+fv/Glz74Yrt9av/XFL3z+Bz/4u/uvn3/ua2+N488e/+xJQOFe0jBucDPsR1ptkFCwyheQEGKH3TAMALTq1i+fv9zfHO7S6Y6xC52kPOvuDZJBBCCXruUjAGOIhiV0nkK3UrNd0EjalriJF4V6WJNp7UuJLDDL//SpUyoMcb6f2eNcYlnRltLgMfr3FOI+t6aL3Xn0Wv7qOdqG9bKiIc4GXYZkcsElmLOw39cN7Uu2sjHNUASYCcrgjki1R5TZTQTJPAe4maZ/pIHzKBortGKUYL4zA2/SS6qWo+UlUm1Cc429Gpl+QFuF5drJPNVT5vuxv81mbRFs+8bDDHND1MBYAmA2rQo93RrUjdqsuMw1J09jWHuxqjvSHLFcI59+FYxeEFsdvXmd/1Mz/3ButFT8HLFScJ4iuxzztstu8E8tR7Y/b/vcvMgzSDO+H7CREh515PyzR8TCLdNSUfPBBx8AwJe+9KU//dM/XcIjToU4Cvarv3n1dRuPeJIGI1dCqNU1/icRyWPiLCcnJ2+88cZqtXr33XfHcYz9GqhkzZTRCFFqys9sF55S+5qJ3wZ2s1geJLdqk4PA79rguPK21/ml96LM3x9jLBV32fdxAH2jxlu8TRJCAFFRRvY/kVJwpe4bEWGGnCUlRgxKWcyJiNQm1GDgKhbbck1rQA4h3Ltz9/79uyKy2+32+70eNmjGp6FXqqKi53prrG/dV/Mg5QRTrkSJ4c8zHZg50IwFvCPVMwvVGK/I5O7Ui
7nUkNoCYTELiVnUkjTzzy4R0ZLIhiliKG07uriSmQYIXj3z4JkjzyMHqq5iQSRwWXg2WawWYDOgaTsy34Jhkm+NmD1G3qIVc2KbLFHUihjDswHfMK8B76FlZiQBZERAkBCIAiCJ/gsggIwkhIAozJmZUaLuF6iQjAlAhmF4+fJyvV6P43h9fX19fT3mdHZ6R6odlVLSYzaZRZtYWsRS6U0je+ACYEc35cpHxd4OU7MrMVrS/Db9O+d8dn7erfqXH7xcr9fMLPmgIUfDs/45VNdAILO/ChURutq/qMtKkibvgJkbRHTY74zgcznZCUhKXjdCIKIMIoVsMBqhzAQQiCJF+4vKpCdl4zdENCQaKI16Z4RImrrHo9nTOWfOaqmWpwrR+LNi6iCKJu8Wnuh4XqxoKuA4jugaRnlFyr5UGaSPeLYxSeTVNU/QImI5xFxzPyqhTJ4GP4ucs9pT7fZQHJlgDIkuw6E5ZhormzYsap+n2yo8njk9NioOp/kWBIqIHdGCgCIoGREDCiJwkZk6WhAk6Adg5JRxpIBxg7jb72+epn/3P/1ZEv76b351uNndxKFfdT97/N4LeH7v9PQHf/anLw5Xn/niW99//7vv//y9r371V5+/uH50eC1Benb9/NPnn9xZ3wXEl1eXSJ0AZNH1ZQCIWBx4zCyZV7H/9JLHm0NHr0u6xCSsDVo0AgOifidEiDGutpurOAAQZlRpkjj11eLSJDpbO49DWOjWnh685DVD0VsFdZFMThSmNXYzg4Gdl86/2obyRAhVS2te5IS7ePb8+17ifJPN3oAYmqxIe2SJOrsaKrUP9iL/rCGwoU9r0QZzprbL07m4wjlYWNrNTO29fhyTGNnl7nsBpUX2SxjAtUQG16tmpnvPJGRr6VWqgGZSWH09ONcs9ZG+5qgUh/Tcc6REptB7KpV5TnvjzvBLtlxN+zO4Bs6Gxpzb2K+BB3ObROb2ScOM6sf1IPlV8CzZvK5ZXz+4fdMQ6uyRyrZetPIrg4pLFOHitBj73m86JgpsFv7VBhguTCOb1JL1/OBwjAfh2OIevYhIG41+4QtfUBdYjDHNx1yC18C5JJsG4OVTR79vVtC+D4EAijHoR0gpIUCMFGMchwMAnJ+f7/f7Dz/8MM99OuDcrOUtbnf2b9Q1MifsK1CnAj9CZzLkVoZa0KdH71HJeRv2bC2knt9ARMiTUWQyRO2xvu9jDFp1pg6dMQ0C64Z+ygZQfevmuzFZV5RjignT4XA4dP1q1WltW7+Kfd+fnp6G0F1evri8vLQiN8O5vUVLoTRBVBV0Zo7o+juIcNVGp+x0mdF8g2dZlAj5STEzwCwGqBKMuQ0YMAuAunTJp2tqhp6my9buJsGEMBEhaGJh0FxQxGn7AADNxvROLkvnw2qfYLXx1PzTbEYzKmCxIZo3tvnANT5MriuBMUKMU8DAC9iGGhGD9rzAyUuulDlzvIrT0sEl0HrVtCxK1T1rBmYw/b8BgJnHcRzTobTr5DSO47g/7HY3h8NhGEYAuL6+fvr8xeFwoBhSStfX1916Y8BUCEW0R0m1HaydzDiOwTVpM5phV3NuwOtotcIZAhJwANDWGIEIEpOInJ2dnWzP9sMh9l1KScYEUJJ0ZOGtQ0QM5L+MnQAQAYqUHnWFzYk6ikYVtqCBqjQWTJy1cImINPZOKBDKIzkLYpo8uOSCG0hT8RK4ncZEDNc4oQm7RlwapzEDIhCVoNbkEuAp5cA4UNFOc8Fnoxl9G8BcY1lGu7ao4NSaxr43B4lRmNcyTb7bZXNZit1K0+4GbOWRVPNS6sEY4lVtt3GD1j+4CuBqmjl1vJ5LwS7sYA6Yxo+ocssRdFllJ80nXVx/GlOCypOqjSpeAqKI27+BgFAEDpIxAHWUM4MkgrjGDQOOz4fv/psffubeZ9946/Xd/ooCfPLio//2n/+3/4c//G9+/du/+X//H/4f2++e3f/M648evPF73/6DT+TZ//H/9H9+je78Sv7i9/7ixxcvX5zEO3KQVVwPSDmPDIwEXSDCyJmHnHOSGkKHm6tdxEiM6SDSTUEzAcnAzByABKGvuQdENPIoYi65WVENzTPuTBbo5YPMRnKW6WGLYvdP40yOwOyEUe34EmYlpkYnXsTD4hIRcjLX30ak6b1ylJiPDnX0zyUX2Nf+fiO5xuJycmBW1QBOks5Hnu4xxvdo8ZwFTnB5meMB9inTBokX6M1Qfnx0uGXXWcrgJ6KcwfZXz9rNjOzKrqeOwZlz1jocmDtTmw8eJOfTOVKatVy+JSR6+VRbE5vsCgL967CmmDaDYD3nSmp9jjliaX5wuV9Bz2gmezUOIPOtoVkacDyiBfENMN5r2TLjcfKfXUteaNjwFfj8+17/CUPdxqqvGF/mBu1Rkih33o4gInr69OmTJ08ePHiw3W4vLi76vr/tdr+s/sujsvEXwr+8GuFgM1K/oZJIc3/OOca+67o0DiGE8/Pzq6uri4sLKi3oGIhiCJknfi/EhlMz8F8IcHNxPbHdO2gqW03jTXxdDZ7s6qL/k/GGVX3SIiv9SScbQvS1Xvv9fr1edx1pc4dcO/ABzlBhqPa9JAwerHqLatWZU875cDh0XdByJoCoVXxXV1f7/aB5mMbRWl2mxlKMUVP+9ESKScCmRQofIgDoERdqtknmhMmQZjwrVXE1+cC1xt7vDuD4hXkSWSZPzEknIkRBu+iLaMIta+am+sSIQowlrKLmTRqLiCu6+8h+Z8HJ2XekaskLT8Wbme4NuxnZEJG92peKkctk8Rq47ZXeYSdO/5fF9o2IgOy/Z2arhXGYnFHRUYrFejSa0o9GhtUSya6KVQlPjeHLq5uc8zAM47C/vr5+8ez5ixcX4zj2/Uq3jOvdgZlX3VonFRxWqzZYkKCv0OpBseIImLrKG04a9QDc/qWtTgBQ+8YzAzNKTQtKkrfb7Z07d4hIXSRBhU89A09KoXu2+MSEZEIAGMZclXX0ahgR5cAddUq01exCPb5FGBhKam4IXYzx5cuXCAFjUHEUY6+sEXGuPfDCe2Q8H7SxpFOPvAJhokecEsPMasiKS2IuJOJ2JuNGIlINwYYy/mTmXFP7lO3LaNbPYJ4gF2NkbuMMykKWP63fmC3kJalRMFG7sZkoMR0FcZqIsDRv1D+7rtMlMT4vVFVuUIYUEQFsfcnoLlD+mxseVNP97eaJmFy6QjXugGtvD7UZ0HsmtEWqbks5o1hSu+IBFOcYSAiFKQPGjoAQSPjAAJkyrXGdL+TlzdWf/fd//o/+y99/9Lk7GW8O8vK9i3f+L//sn/3Tf/pP/+Ef/NGzy+ff/t3f2tLZT/Y/++u/++7QD2k7fOE3vnCTDn/7794JadisT9OBBEAQEnOAkrjOzDwwAARATgIIFxcv05CChMR2OB4jRnEiLPujiqrYJSDIk11nMsKz+nIVGlIx+1BcfMwzarmZJmeKEyh22+TCMMFnxGwU1cDWgLQUuH6QpWLx6svAMO4m13nP39O8zkCyt8srI4f+G3vEtszbprNciBmbuO9tCt7n5WsUxdnM5LJrjkohv75L
YMQZjbbWPPeMhnnHLH1FduU69q/dYNNZUqa/GRHtvCk7K8gMvCWW0Nxq9TLAljAY1xwFyUdQsbrYmFkJu0Em1wJ9T5NSI7GwIHIiskoPg6pK0cnH7JfDE+GMQuYV2jBjouMuvyXdLlfh6OUJTJxi56eWXXmwB9sWpXnqFfzbIO0VZHMUmF/murq6eu+9977+ta89ePDg2bNnzWgNuTZ/Lufy972aSTU/zVYcAWCGjRjj6Bq8nZ6evv766y9fvry8ubZscyyvYM1cqm/RsY68Dn45TBrNe9mI82QBG9bKfvzjNM+4ax6BI0ierTgiqm7thiq/qrUwDlkL+dSmUmNP9LQDBji2lGHRZMtmClVJ63K/298cDoftdhtj1LYL+7i/ubqG2orGImlcU900MHh+fn5+fn5yckI1F13FmsxLQ8v0554aEQEWqYp+sxYNeg3gOsEZz0pNPHbNzGrmXt2hFHJtPWJzMTNMMW9mGELx/ZnagPWIAq9I61uafUecm0y73IOr/eHaSQudNWjvhVpnBNWmKgDwtEF4m81/Xu7dXqQo0nxMqFILlSCuy5XzZgXVFhv+XYoczXDWgjWah3PU/NNGO8MwaPXpzc3N7ubq6urqyZMnz58/E5EYu9PT06urq8vrnWUzaRhQ3GWSQ80zqrFu/TWEIPNMZlsO73M0KmLmEHqojFw+UIRAiAhCArlb9ffv3z85Obm5uWFmClEERTSbEUWARd0QyCxZBEDQ732iTRBFpGSHiQjnko7UhVJhCFL7BehpxPUcBK3C0BNogGKd7OTH0QaGyTO2WmV22oTpUoYXYxifrwzH9iqTp0YQegXqEk+ODU8umsboSWT6qZIpz1vZLgixPGWOkDzvANZIMeMH45k5f85EjwFjTGjvrwPOks30FYU/FwSE1WPtZwGISotKQx6H4ERh82cIQZ1Zhs+i2s6zv0w+0nQUnhiLMnPoIgCgygkRBCBABETRwoG6aREChkyMXc8CSICQBSWPiTiGMdxbvZYoffCdx38e//wf/ONff+NL59s37v7O73374XvnH37y0e996w8ywOPdh3/1yd/8yV/9yY8/+kne4Lgauw28/Y3PX16ML362w5uOASAQQCRg3XxHyVQnFEIY0kgETz/59PryKp530skBRqhmLQIKsxAikBmEZeFAYpkNRnem9mQY37IBN6aF3xuM/JrVrxvLLFJXSa42jEGGudXqSXoaxPGLE9YzM9JuMJjtpUsT69VXw90N7TV3Qt1pvKLv4fHcNy2EqyXzWPXTxPl+75nF+MjWDqe9fNrYvJ1jm+VyLkdxaE81c7GfGqYzmGG+rfpZowtULgfx99s5hA14JrKaMdOYGhzaBnZ07TxRmXwI9ZBDWsR7LbWmwYapGiq4zK7zdAhOfnpJ6HVlO1TARLFeOc/69LBzafvFshsMYI+0ZiEM87dhBgCkAmOr2XD3bddRFm640v/qv4RZZdHxG45efu63vauRJIshXjWjcRw/+OCD3/j2t998880f/ehHqmo0FAtztB8lbH/zq2d0G5M2jonZDBB9nNMIQx/RcwLu3bt39+7d73//+zc3N4gIAkQEdd8PMbhUZzUsxPDm2V+cjXf0src7cW1r4RFSSdE9ZfNtxIh9b+t4lCDtmxCCNucAgBCiCZNxHEufCOpYE/DGkSiaxQKgiJn179VZqAKtf5rCjXOdJ7gODmrDENEw7i9fvFytVnfv3jW13hogd113enp6fn5+586d09PT9XptnqYiYVKWRWsrxqmbKNd6NoXDNgice2PF7fW2J4oIwCRX1Z5RyqldiFtxsd/vLVqlOIFqgRvmLd0UEUMAqUfDS+1gbLAZHoom7Dq62SrLMQXYxLu+1wwbN5fJsWVaNFQ9kF1Ep+6YM6ZeUrjHvzB4gd9ISPvTeFBkZkBmd+jXZrORWshnjbihNtNOKe12O21ENAzDOI77wygiWoaqw+px9jGmzWZzfX39/PkLEfROH3GdbwBIQ1a2HP6IzhCCBqQNOTY70yJgLoVsqzXyM0nIzICCiHfu3b13797V1VUIYcwJADT6N0VBERLXgxPdlUW6Gi7KWZiZquNAiWekGFHdFiXMm9LIWaD2QgcAxKGwM4ZKacXiqJWFVTtxVBtUKGTXY4NdzrH92RhXXlZC0Z5nGxvWnV7LbRfbBuU8puoHwWrUKc+knJU4PDlOTWnqd1bBQgTG8Aa5tr8Ht0PkepncMUFmqpjNzojDbEIjFJ1CxIA424ChKr567i3iRH8AwIUT9JGigpi0QlBfxTSgobFhzhCCFgDaDfqirBkFRAiAXJwNJDDs9lzjtyEQImY9lDgGEhAWFtGQIhBGgiyAuvlr8BCRERCQKOY8ch4RMqAgYGQmiTGG3fXu/un9j3720Z//u+F3z3/9tA/dNvzaN3+9h9VLvh7Gcb8f/sW/+BfP0nPsMVNanXdXV5d3Hr7+rd/51neuf/Ti5zchrhiFSISCSB5zDgIAFCmIIIXAPBLBy5dXh5vD5u6m71YD7wCLpac2GIggImPJ2uXiTxQREHd0ux07Qa6HuMe5kTS5WkGoe3w9N3I6AsikcCFmsli038Ura7guz8slbjYhz2t23fasJ91Gv/9lLr/xuKFm1Oi5uInwGDxejPhduQEVnWLRjO/hIZdViIt9UW7Z+KXqZOYC9EDqv5aBg85uEXf8riHc1tdTgp1A5RLDJrnh8WnL7RfRA6Ovi3FyEPhtG+o+7THg/a+2pXmBI05/tXEaxYhqbj86g1CcVmqYN4pSuaqtKRQD1qbCXOx6BXeEiY3j32tOXHuK62lpDQHoxPOiBZTRW0OiZaWEazqGjwPcat0d/Qn/PpH2ZrRj3NSyEtQmB8t3Lf9sRn4F/DjX0o5P8BbbhojUefnixYu+7x88eFBi7POMsiMD3gLPrQDcDv/Rb4yqodCGiAjDrLga3ZadUuq7eHJy0ve9lkR2XQeA6ldTaozUOXVoxpLmpPBk8wpHW3BNsAyeCVFT/7xprb3INXa7TXQb5uerj/YWZRYN/UFt8if1FDGVVDGExFnDXH0/xb7Aca7HNtc+NF74VC4GcCYWM2coskuF0jgcLi4uzs7OTk5OLi8vEUVtKrUB1uu1nk+opYOavyru6BceE9e+UyaRAEFbdGTXq5kQk7Sn73iJ3biloGzuR0iLXDhInJqueJCZzklkwcB6KqN1IkVErmm0OgsVhloyYFYuls06mAD0+46Oo0dK2grqG22VDQyYjkeipd4CAAGDX0db3xgnp4CJXHDnZDb7iEHuY3oiUjpt1ourB8HO7zV9wD+oPxmhioge9DWO4263u76+3u/36ke42R0A4HA4gGTz+IzjeOfOmohubm4uLy9PT8/VTs4567ykljYgBlX59Vl/iibPvajGTc2//lcREUlFu1CqIz0oCxAh50wBRGS73Z6d3iH6IMaYxpIsU/mRMVAIgbUXIhAicq01YxbhJKKZqFxIv2CVAIQ5IQWpiQAiEiMwCPAU9UmpkBODJU4XV2/XdTFnibFnHnLOq1XPnERkvdp6FOQ8pjRQPX8Gar8
Ekxo6H1OsjTGISJIQEmROYxKRiCQgeTz0kZgZOHHivutCIAQWkB1nY4AQJ5O16zoZBpMORkkpDwICJMKgyoGKQBHJDHo2PVKMmhwLoGZmkQLFHUcUooiEGCmElCFlZY9IgVgGRQXn2s+diAiLh4MQA+WchVm1GaKAdavwFJNzDlBsNgCINUUQO2Hm0flIEAkFEILWc8h00JZExBRDdL46G1xri8ESAxC8uc6sZqaAdmgijDFSVci0oA4RIQCOo5R9UQAJQwSiEUBzqQkgAuaceRg6ohDCwESwYlwhcAbOmAZkkF3OQ3cSxnRz88nuo/H5j/jj3/oHv/m9Fy8//fX/4f6D1z+9uvzk6sXz4ebnp8N+oDvnr8fDgfaZeEw3N6v73Rf/i0d//Z0fvv+3H771+HPb9WrMhzFdb7YxhDDsx5wjQz+OtDp/LdzwzcUO9+EunOyvry7Px5ACZYIxE9AKVh0xcjjbnmAHQtyv43CdIgXI3GMUOGQGxND1KsJy5iwgRJq2rlp+UCcKIoagfDVp8wAIgOOYquxAp4qQ34oyC+OUvJeTKtkAgIlL5VUWQaKpL7itIEjmLLVfvyAIABKOQzHAvL6i38Sog+jBazKOI3MKUUBm5WFSu+yKtPIdAFIaTMoTBXWVg8vctp2eiDQFnbNYTnygIn8Jq2sDAiGVXh0sGGbBENv5xNl7XiMxgPVzYwHCXIMRkRjbo2W08t8iWvqI+kQQMaUp5AvORhKnqEHVwEIIOSdw+oSdUGobnvkIFFGxDwwizIKAgSbNVSRQsPDamBIRxa6TnFUbYWbmKXu/itkQgo5cT1ut7dFQjUAfNapo4nrUBM21VawKaCNmTVNhZnV8GA3Ytq0btu2gFkg0Mei2TGk+S7XWUkoAJrJmxxgiovKgEUYtYUKvihme7angDuJDRJKApdVoKcIAJEESYQC2tZj+7UIGFoQACKSLlQGJSB8B68GuJ3SF2tHb5m4oMjAMGACoYSFB11sbAMbxsKR8A8phdbJ7F5adPjjlwPsB/dL4V3RgSvHshJVxHLtVv9ls3v35O8P++uHr9zYdHg6HGNciCCK5ejMBAAgTZ/XVApQjwjWNiXEK6XtshPkxEjYZv5T2kz215HpGqutRq08RAViEN/1qTENGiTG+8cYb/Xr9zrvvDmPOSCFAPYUcukjpsO+6Vdl8C1EEohgCMicvOaGKI5rnWtuVgNTteBjGDTMRjcMBQXNYJAJJIAESQS1djCUNx97AlZyUltA834hAhNYW3yMNRJAgp7GL6zGNCHG96gglCpJoHz45HA6r1aqLq8N+XK87AuBMOaGoJZAz54AYzdOFOAWvqoQEgLLF5CxqjVDA/X7f9TFEGsdRi5cuL68Qgwi+ePFCIIcQLi9fjONhvemvDztE6kPsVrFfdZuT9en5ydn5SampztyH2t8hMwJ0IIIQKWYqTj1NjEIWyEwCeoo1IgJhwACCFtUwUle5kZImswQiqzMnPX2k0pUgAnPKeQQAZl3qbF3rQY+bKwRZ9hoqcYWCqpwT83S0wH6cDifMkClQkhRpnZizCKvvPgQgSuXc75XmX0ntrqeNMYeciCiu+p4I0VutIefMCBSK2Om6brPZ6JGzkYKdEJtS4uI4gBDULk1E1PdTRqtN0/bclLLaeCp+ba/su7XA5Au2DSJLyuOgt0WKqqmmxESRiJBFkkSMXV/qP7sONSqbc1LizzmN47jf78fxcDgc9HRHjRDmnBNfD8MIQkRRADPGIcfV5gzC9pC6q6vLlLFfbcZxBDh0XQcDA8cY+u16lUYRwRBXXezGceTMXdf1fUz5kPkAwDe7cRU3NVBsSaBT0KiqAVPua5BMLAEDUGnjmRkOA/en2/2YNnGVE5xszl5//QF8j7p1z8TDUM4VXFWr1SQbkmgVnQBgDERdznr+oRAFER5zJqJ+tTay5CgEJLnARmNEDAAgg6j+EEIkojzl+OgS4zik4TDGygmq3IyIqIxtB/UCAGJAnDli2fmqTSKDVz6qhBo5B22aVOv5BYARiBCQWAQJMwjArIG+yUIfcsnHOsCaMxvmG14jnd0uYpOaqb/mOfMA5JxrgLONhvM8am9P8YTo2WV2Mjh3Ws5Zg8XknFUAgIsui3W3A6QjugVUVczr0Aa2LZDN0QjOfmoAg7mugC6G3GBjGIYASICoRXnFbRzU0OxWq/M+XO+vv/e97/Xrbr1ef/xXH+WAO8iHyJfpgOtw99751cuXPBzunJz2Idxc7Qnh9ddf/9VfTVl4/Oh6PMh6Q12/vt7ddCH0YcuIJAiS83AICIcxvXj24uEX7gfqJbOIazWNFTlJ3ALVpQeBRXGIn6B9WC5pQ/A+suTxQ+4Mz+XjZs9MS+Z0ZY9zu62hT1qoVg1j+mdDCLmYoDOirWAcxcPx/hyefmShBgHMNgZx9ueCOI/UjHmibfG+uBru8/M1Lc3w0NwJjkH0BnUlGvINjCUk9hZx1ZX2Uk8P9iURAUozyPKzOBrQb0w+LPHpZ83zZjn+gxapmyfbvvfiwksDu9kniWBN6fRC2CjBTGiDvNGSxV3LFa9roTAjwAxRyyUoCFksh02hYRYD8pchquWLsCb4IQQQz3SNwTaLgNnEG/ZZvqL5dcn1Rymw+WzL5EY48pQR5PJiZsaWeaFmCaaULi4uLi8vHzx4sF6vNTtLTUdtmaYOCGH1QLfvNpJdEsBtmLntMoMWGwE4tcWxKBwiloAYIsYYN5vNgwcPDoeDZm2p34rBXD9VHhbQfqmQbMNubt2nLDLj05wzButwqKpPgbsxXfwcGz6FOQH4S0e2PV37c5j7XkQyzFo6aUaYQOk3CK7HhsDsZDKPf7VDDKSintVQlpXSYfmpFlSL9sYcRCTzyAQBBULsuu7k5OTs7Gy73Wq5I9dIoJ/pitTbJVhordgPwfpZmHsui4jEMDXngJmwOn6cmMwvw9hy9fWnuZ48XVwziQwwS52FWvhn35iAVTMYq25gebBLkvOtYtB1wtTuIIoQkmJdWMaKmbI+vqqmnRGYQmjOuIbCEclKzjzGcs7aJcEr0szspaOXUTpxTVjT2JRCiIgApVOeAjwMwzAMV1dXKQ2WKerLNY0gAUqQue9XzHm/32tSdO1Jk5UL9DwVtWQZp+M3FJnMnEsMber2r0ZvMCt/XhPhRYTtL/aUfqPxTCLoKBDidrvdbDZ2CLCnT48rdbuDS2P0Hn9j8Ib2PD2b63zBvMfz/COUXnBERCkNitAQwliLVusr0WQHV3c4uNAEuO2fa6acQuVlWcNysBBqjYjPLk/d/2rMbJRnA7Lz0zc87GXaktbtZ/Ppigi5NxqulYeXr2BmuKUIw6+WF/HiyMjLrOUIKkyR2q56Ul3y/hsdzWhXan8zc+2YpDPpg4uZNsjxf06UlxKTehARSaDEJxFC2O2uGXhz0occnz/ff+97P0gp/dZ/+eWRcg4Sukh0GMb9fgTGkYNc73YRY7fqgWJO8trDu98++8b7P333gw8+YIhnmzPe02Gg1WYVhAEpYc5p13UMIzx58uxX81f0lFKCDoRQiIWlaC
klaFCIE2oHl6DtmNBaUxQ1VOhV53EdU08t5djQZawOC6o20l1itRnEiMcGpCaeRrN7wPGXvcLINcbIYzLasilUXeGobTyJGDymthqxeQpfYswmaB/8r7CQa/7mo+TXTMHzpgcJFgTcgG1/FoEYO4+0Zq39gM2v9kaq7af91IzLBNmP04jBo4xv95v7ANyhOIYWA2M5DlZDzpOZxdOalTIK9CSKrnEoHuvs6j/kekJ9jDGl9oQGI07/vQFf+yIfF4C34cffg3Wf8uzQWK0wpyhP2w2dMCOFEECfYnjFGpHo++EWD8KSfZrvXyF1bUWaG8Ctqb+OvsivES2yoaRaQDDHMwtirUhh5ufPn3/44YcPHz7cbDYXFxciAvMH3OeWxwGK2fMKOfBLXs2sJ1bC6QbFGIsglvlqcGOz2Tx8+PCjjz56//33Y4zoiooNCtaNAQ35R3jql4FZzKggFS+lawOUZ6WagpMK0cgT/+dRDNyCwFJbDgBqECJitgK8+VZCRKvVakxsupO+jpnnhzhMnOLB4xo7ExHk6ThWnS8wp5QsBYMzW4XOOEroAsZiDd69e/f8/Hy72XRdp/LTKoptjnocApc/WQSn45EBuBoSIqI9/AinRHeYZTxOCSDzZZ0lxjdzN7mXXVsKWx1vM+R5LT3XwwCkamLiPGgW6vBnvGHpboAz6Ob04A3CBhgAyDC5NdXGTjWXx76X2qoUp74GZYKWSGlboWKdmUs9U5GQtf1BAKtQABPXVT9h5nKwp9eCuJTt2bl/KWlRWFmsw+Fwc3OjvptxLFFBqbGinDPo64RyxkCkacZQE0dzzqen2xjjmDMgY5wiIgCipxyClGQ6q1ps9sRjCJ8akSy4tT41V+fSyCw8juMYx/Wq0yrZjz/5KOLU8sdqvrCGqm00g8o6YqJzevKiISLOd2QvLRtZIU74i0gkomEYEIWolNitViv/ZMUOuzm3QSeeu9JnAhpmxZf24mVExcPn4Tbo7R6/GOgatRl2bsszUTZHJESCqbGndo7Jk0sQUSsFmGft3Q0h1nvdT7b8eiy1TBfSI2ECz21gE5wu928mDGCmwOFcNHtNDp1+0/zaYNUjFqptIFVjaFCNTjHVQUoRMyCKQMlpEQDIklF7Hg7U9/0JwcuLy7/4j38TX0vf+M1vnJ32N3hzdnLy/HCx21+enG33N4f9bugEaLXhLLvxBgjuPzg/+4df2P+Hi08+vCDpaLOlfeAcAkZgWYVxx2NEhhGunl9ijlkQI5I1fWVgYQYGgZGzFhCOnFcYAbRhD8/xGXQTRZi5M/RHv0zLyxyu4jLuRETbKDt2mOEcF4qRJx7D9pIYyIUWYdGTE53IgHlzy0rzR7RJ7ytpbtCLF97rhjaO8rK/vJBqHm+G9RPxfzY4lLmWvMSkz6lb/tqwKlTPqDgnl7lRPGIbH9tyjs2fk/8lAMxX3xNGw5u6r4hLjbOfTNuwcY66Emym5sn2U4bqq/YoWhqEfnBLtfVblOq4dr8JHCJCbFF0dL3cl8ePLZHFqc3LBYUFOS2xsczuaxjTvxQAUCQUQhKpYQmoQaTqM5kAED6y9S6pdPllIwqWe7zn9wZUWyBPzFUj74/OLudjqYbOYNM/ueqjzEwh9iEcDod33333y1/+8tnZ2QcffIABoTRude9o5qrlILfM3SNqefkZeRT55ZuRUBVw86lJcWQjIuLZ2dnDhw9//OMfPn/+HEMUnhoROGKQ+aS4HhF0K6hLPJskIZdK3ff9OI5mDorM6Me/Upwu0eDBaVzKjxnmF7vUErP9EDGDJGGo2ddT/I0mGqsWi1LRwiKsDimNEM6IR8S2TjTlsEQIZgEfROj7vusDRVqv13funJs1aHVuzAm044arSdvz7IQzhqJh1nhmy1Nmbh1lQK/9V1RPRwv4+ZpSBDVcU5mOTNz5CJJ+kNqKwiJydmd2fQpgHhoyg1BEdH0BwAtSnanBCYDmq9Ls1mmyudCJHY0YQojkjlWoQRdPYOK0So8EIkpJz+5GIvTrIiLCJCQeUaIENMlSEMG6pxUq8ofbQY24ighzHoZBKwYPh8NutxvHMaXBw6ZXCEEYmWugW2S3u2Hmco76ai2EIhxD6RaDgBlUM9BT61gYU0onp5uu66qlExWl1hDITxbccU2e2EIIGpiGuQhi5hhDDDHU08VOT0/v3bv3+PFj6ilMu+1k2sQYM7NItjCykZbf6agemeO9w1h1QnDlzehMAL/cHlQN7BTaIKKu67Qn0pgOGmY1R4I1Xwru3Cqqlw4XwqwKtpLaq5iwgckor+FedMGW7MrtjPf8XthsMOSyuWSxD03EeotG7gVrA89ybxaRgLMt3N8PLhPDjIfguvZ5SjoKj7Dk+VoaWZj+6oExrUJvMxHg8WAvNZj9UIY0D4afcugQuCbySwZAJAEM6XDoupXWofCYCTBlGMbxj//5X5+evPb5X/vcuOKEhxCECfbDfuTUn2yidLthhMzdOgjyIV0+/NrdL8nnbv5iePbeyxO4d7o+hSESAPMhBgrCWsR1fbk73OQcIZxQQIpIxCQkwJBz5swxT4q+4gwBEWOGsc5mdiBe3UiwomVGqMvLSNEzG87byi+XVY5peA2lGXsbTzVUQfNQD1bzABap3VUiTA1X5hRypMGpUYvXTRs4bTSPQCOn5s/m39i1bV091/h3+QH9PQ2XNYAdXa8l2u0ahsH2AKjF0s0gyzEbJmoWyE/f57c3vx4dGatOY7g1pYTnSQriLE//Cv1Xk5GMEkxPsswCL388vxtsqgZ5D6Wfgvc6m2TmhbBqqHS5CkthC8688TQpNdrvQTV56G/zqDBlyy+TGfxHLiGEQBgyZ5GirKhVCFiVNJzGzzn5wT0+G9KtPx2Zvn1uVa5fInej+ek2+j+Kdv1xuq3mbgEEkISIIJJz/vCjT1ar1RtvvPHd734vdr/gZMejE4dX8uBtlx/Bc7oXUI7g0P2n3JZSXq1Wjx49unfv3rNnz0IIgigTuqyRbLBH/L84j6h7wIzkmtn5f43jRARNnBoR6Z2LVN5XI6q+kSwhV5mBmUOIWBvN651919VNSjxtGFM0fNRQlJ+d+Xq4VszaDqgqrOYBasMYRFRr5HA4XF1fHXb77Xa9Xq8329Vqs1Ll+Pz8fLPy1iAzMwhrA323i01Cj4vRHpAIlH9pwjZBYJxSzDwP2nQa0YqIzKYPtEg2xdWXz/nbuKbOKdpt8/U5WUjBiKERpza+VEXXZ7qZpq2Qhxi8+i0y+eg9qAAQAAFADSpmjjHmEIClHi6AppFqdSJMagOoBWDWr6lDAFD9BbOWZiBoMFf6Ufin5QPn4+5D9AE3Iyed++Gwt4aitYkRgOvWrm9EDIIoQaw7y263A4DVarVer4loHMe+j13XCeOYOATtZ6A8gESkvRstsk1EevxmKXR0yZ9GD0aoxhfFBTAyAIBMJonekHPuYyxNFpg3m81rr71WVv8WBhcRS3/3O3Ij+sSytRfmos7Sj2CP+D9N4UfEaHMDgL7v+1VkST5XtTJYEHceuq0czPewZovCqn36CRh9+6eyr6R0s
B7B0Xw+PvvR2NW7xDww5LJ1/SC4qK+zAQWy3W9eHz9rObbPNfs6Oh0F5pvEJN38TjN3Q04LyaznXTYLDy7XGauir1eox9R4FDUQghP3eWoQMkuGhsVVFjQnEYTMLIiCWA882YbtMAyJRwDIzFkyUFj33fV4+Os//0GK/KXfeHtPQxr2sKbr/a6L630aSSRSCCFkHphTCHxJzx99/cGLYffy8qe7Z7s1ncZAkBgYESBCGBMQwvXLm4tnL85ev9dRB4LAJd5FRMhVqQlB54GBUFhYCDGETtx+j4iWUGocZwsL1YHricqvpq2vSRDm2fd2Nbnd014751u7vEMBnF8GcSIkW0RPIQ2tEpFmxaAz4eo9t5olhh/703hExG8hYn4p/2xDbwakzNWm5r2v+NL/2nwwPHjEeoS0CJ/bGLZ8Zrk5p1ib4r8EbwmtZ39EzO4g5kYONCzvV60RjP6nZq3NSiSiBgN+WPvJK4L2DQBoDo//ycpRlLBD7RcqItpBxyPQ4WoWxlx+bpbjlmvWRLcsFSMIsUwejVcsBMwpx3DuBSY4ep7uBNR2IiDCOQCULtAgBFjiZyigSZVwbBYNYM21pEa/xM16HX3c5DbMK1j0anYrA8mz84wHZ1kAjdzA4TB0MTx58iTn/LkvfLHkg4gICAowuvI9QgAAQtAW2QjaNeXoor8aRUcvLyTtcdJz6so33iAsTDiMo3aUSSm9++67IkKBsgjMgpxtpN22AIDjnW9gvoIevBo0IhEZhmGz2QAAM2vqYykfrHsKLrZaT/AN1y9QohJm0q8QETF0HW02GwWg6zoQTT7SHmPFilMyNoVSpc2Q0pCSDwAsZ1cY0Xm0Y1dq84jIOgIaiLn0FElEpPr6/Xv3zs7Ozs+1D6SkNEJtYU1Smpb6QyBNZOXany8EQJisa0SUEv/MIsI16uIVMJ0N18CdV4o0wgYLmiTXZVofMZHumdQirnoerGmVXFNG+/VGqsHjdxN0BqFxrnnwZxwqwszrbkUuKckWyEpANee2AMdyOBxy6QqTND6ZR22kGeyNXCtIRUTdFiYYPR7c3qEsw4W09MsMIrkiM0dtDuQudFcMkYjUZFZotfv9OI56xuB+vx/HIadEAggYQgQAO6kbEVlNL3VyVCSreCMijEFH00BXzlnXHZGFQWcmEkOAENU6KMFSgSmwj0V4kCdmv5E1shQhoOutausSu8mn3/fre/fubTabNAxwLPcqpcQiFIphkms9vxfsfk9kFyRs5IMHvhEyfoNAdR6lPMSOcs5I0vUBEStOy8PmQjZ6pXn/ElNVjee90ejBgnmAsZGhZfw56RvxodNfvbOkYSGZt4fxs4Xiwc1+zGYZ0NmE9i5cqM4NeM2znvoNgDw/TseWpOF5cDLL4618c/TL+VP+QtegX29bGrQevR7zfo6NtuofLPiHghQE0ez8vu8HPZaHEWMgJADmjGs6//mPPjzAfnu/u//FU2JKzH3fA9JufwDJ59tTATikASX16+7i5sXp+u5nv/LZm+v83l9/fPP8MoigxEA0jkBdF1HWIe0u9y+eXnzmzc/eUMcJeGBgpoAhhj72GDAcghYulzliYBaSmR0FAFo9WPE2Q2+1MFtXt9GDoctTCLlSfv/I0VUDAJx32zNObJbMkK81hA0MMBdV6DTmEALiFKSym5kZFm08AACR/J92vYIkmjtNhIGjrub+BjmN2DUsLfX+5as9Mj0GlkPBgn0auW9bQnbnQ3qRgojsUpiWKII5OzOzFv2rjJlLlFDx79EVYKpunck9S/X0s7M/vZvJizubnZc/Xob7CfoZ4TzsbJ9hLoQb9VF/9zhZEoC/vyGM+qH1woCQ1pgo/MGxjOG5oU/73LhR4RddBBIJIwWmWmBjWBWEmmHuqaJ5qZeoS3o7ykRQiecV7O8vL671G6MKPU9vOcLSx+dhQ9R2D9OvLNjHOAJQiB9//PHHH3/81ltvnZ7fudmPkyUFk89MPB3rEiOATDAspehts4M5JmGSV0f8cWrbHcVq13VjGlar1cOHD1+8ePHzn/98GIZ1KE2kKoN7wThzqRBRs1yvWL4GeMQSXFUwfMF/HV//L1RjXA2izHHjlxgAGnigZg9i9TluNpvNZlO1YakGv1ghX21vA7X9CYzc6iTzV0CFZ9pc9H6up8YZ9/V9LznrKerkDka3+1NK2ppIqqWnrdqp9PdWk9LFCVH0uKgpUy4QTY6wkpFoooxl6vprcqa4auvl9whfPraYMmi1ESKozKsLO63B9F5mX29mL0rVIZjnbV2o9nOGhVvHr7hHnWHSG+Saum8Gp46Tc6Ya/dY1mrJegTTTEuc2xjJjwgY32qi68fRTgZanhOTgzAdxkSgiUvPNStP1jfv9XvOKtZtoPep8FqLIOWceYZIwpan+fn948eLFMO77vtfj4CvqtE16rPs4ppSk5I+X7Nntdqt2MqeBiDgnTtz303khxpLz3W1mpYsIMSJOW3/O6jQUBOAMmbLEIAJd152dna1Wq+SOb0GaqVXKp55flJZ8mR7O7S/7fmJSOW7KsqsX9dOJOWdrbRTq+XvguuT5ZQAAxCla2hTvLaWVt7A9WhFRo7p+ryoTm5tGjX9Cn/XRS3NU2/Q0V8FcNQ0iJky5whtYNAUxkGI3tSEyeHieiedfIW49PKKHYfANgiZOztP0pzFdZM/vDfaNx4nJOPvTf0P1TBWshweYYe/HtM+hnkmN1UfV6H/N26MaDIGABbSpNWcAur6+DiFs1ieHcZ9ZQiBBSilxovOzey8+ufruX37/t+7/2p3Xzl+kF5llxHG1PRGm68M+Aq9WMQINaZ9W+Pzw8u7913/lm1+4/PT68dOPiTPhaey2vB9Ct+kwdphurnf7q8vz7foJEGfOKYFgoNiFQIEwEuUQurL5mewTqVpRbUwKk4e1FFE4KcAAiNh5tNsCaTmyUZ0tvWaNy9xzAYtUvYmN5+2FwKkCVrFd5enMv9i8V9wW6KnLpAPOowQi4nX3iaR9wqT6khEAAQOBeigaJ8XUFR2PjObkgH5jkt1u8L8uuawhP7tkaVojIiJz+6DnDn9zfePMG3Xbew2lWbLUJBbwkEPVjFm4nIsmzLzpi4fYQ2LLscSYTcQ7+WRhANj00V1GP1LdZHZzgyiPbagKgb3CC8biXa5JU16gkTuG0X5lzlLMAD9HcTZe+V+FZ5Y5ouq9IUFEDy+Y9hRzSjYI8X96xPr5suuv0NCS3ROJuhAoYMiQHCXUR6T832Jj2lIbYCpnwulncR8AykkMDdjGsOJUPSOAZi52NYnNzeejzOjvnACAyRr0g6SU+rOTvNn2MTx9+vTx48f3798/Pz/fD8+FMQAyCDlTkE2YuDJSIWiGbdbrl7+MdL1lWwhaZjfoT+M49iEi4nq9vn///jiO2mIUZrv5rTn8S8EFC8QevSz0oWCFekb8YRxESj8ZjXIrIlReHWXJBmNQ9L8lDKRBG2tsvlptiGLOKaUkrCcMlSPs9Gxkj8wMLHnqhMl8fIJSO/QYoymhjuOovRw1QtX3Pae03+/9cqgdeDgcBLKkdBkj
ImrEUquWQoir1Uo/a6dN4/FY5DYz5yzCLOhK1xDbomXjIHB2DhGFMOsZYzJTaOmwEABtRiCEiIQAxAkQIYNkdzSOIUG1dm3WYnRC9dg2qZq9VFMfnCXZwGMU3qyytdvheZwzq5HsCE/Xuus6zSnWO3OtG8xZAhFCQAggalELIh32I7htDgCYMzOrPgwuvUJluyaJTL45IRHJSXKcmvlZ9gFM+whr2mSuB6cpbBohtCC2DuvLN5hZ48YBGBBFcBzT1dXV5YuLw+Gw3ZxuNltGPXgwhtBRDBhIOKtZmHMGsbPahVn6vicCSVkjqFVnDjZTqkUQdnkMmFEHPJXlMwtAOUdgVU1uRY7KAXJhLSIid2Z713UpZ83WNJJQpPmyf1sgXmT6FKJKYqbEUc3f1AD9N3Zdl1LSXszgmshJDW5YArE+YHnhWh6t7wj1BEyjFSdi0ESGQlyyh/veeMNgFZFYQ8/gonP6XrP9zJ2AiKvVyojJsNP0fDcxJC4Urpc5Udh5dOx7cuEjH533+5ChqHw5D1QaTqyDsC1JHX9q1uqNWEOLNcsOISCS0CR3bF31jepOUw+QMZJhxrMT1Yw4T0C259mGIbUVr97g/QX6LhEZ8yCCAbU0AscxoQARUqXaGHtIg/ZWiRQPmfq4vXz5/Pt/9f7dh9sv/+bnISJ2SlcHwRgQxpxxyJmE03iI1K/6m3xz7+G9r/3G5w8vLg8fHK4H4Qz9drvb7ajrIQc4wPNPPrm+eEqPSHLarE/GQxJmhLi/uey61Wm/LWWxYNkyYci5F/ITrzTPRJONp86Cvu+7rjscDkaWRrTeWm62cH8+ods+IWc7qH3W0HIchtIcWa3Puh16lrbVx+rLsGX1N5gQUTrEqlPqoTK2NYqIeivABTqc0JgUkWaj8t9LNRsIZ74oI2yqXlmTJ0UaOkds4+uShRUtxY0+qS+y8EwRkaZpKVTaXs+vVwN/s17NAXrWPsoMpObq4jR+s53L5JJGACAkCsEEly2x8rgdRG7IKQ8788CWTL08DWay6yBn/K4320vRhVYUIXquYK49eI2QrPW0iBwOB6n2XuOehKphrNdrcOqXbXWWesq1ibyJU78czv4xJ12bydl1HWG0QWwcp5cUgmSnedhk/TI1rGo8QrXNt8GW8+Hs7AERfXq4YU4AJAC6aQIAAAEyCohmpiFTWNk6SlXIPHhGafpvUwIgtpEfs/D9CP4t4LaV5i1qYNtmYcLEzsm0lSrUmFnmfX3sQT0MehU3Oef3Pnj8K7/yK+v1GjJT6FJK6+3m+fPnq+0mxjiOI1d7uExhWoi28ZW/zUOin4Or8vALanVxzWgsJULoGVwZUIn8zp07b7/99o9//OPLy8uu65hB1T7FX1cz3BCC1Mc9nZs3fElyS6QpPtfr9eFwiEgr6lNKUQMRykFASNTFDoCycM4Zj7ntYV4RQC5bIdTz0AyHIgKCmdPJyclut+u6brvd5pxDiERxvVnvdruU0nqzoRDGcVyv11QPs4ZAFkrSSQ1DskO6rRVKfW+2zpmG8MxZWcPS8zRa1fdd13XjGLuuS12Xc97tdkhrrOZQrpVs2nMSANbrzcnJyWq1UlFWI6tia2Fbqr49g6DL80+Jc84UOqqnftuDMUbmqfcb1rQ4Ikoy6w7qGYdqz3au0ZWcM+SJXI0SVI55tmXmlDIAUBdEhJCQgAICSuaUGVarlbCEWJT7nLOwEJEOR0QBSYDHNA7jICLCQRtp2kuLHHD7ozuTBw/7QUSoTCRoC8kQQoDoI5k+v89mZP+adi01R1S1I4VQVSMdzZ1vISFoWLXEqhSx2rxN44caEtRc4uGwSymlcUSBLkTt9gQCvR64qFIaMNS+BywpjyVn9cmnz25ubrbbbQwxBNQD9/QtwjgcUkoZurTdbne7/TiO29WWGTSR++RkI6lMJGetgMJxHGPsVKdglnEcsh2wHIKK0NVqhUiHwwEA+37V4+TIAz36RYCIdrvdycnJMIwdIQFSgO329Ozs7OXFhZLW4XBAiqphli67rqed8YiF+qn2qjVhGEoXw6KsKmJDPTdY3LkaXrags2VyzhGAEYEInC2j0q2e/F4NA6VsbzEa6TTvmFESoPpMRIRFjP/VFa8QTR+ObWwmH+0Cv4Et3D+KrM1mY5uKYaShb9tFoEYMvE1bN8tZEyrbPrNrDkHOAE65TSHzmodNx2Azw4yqIwQRCdCzerATWjITRYPWSyLvJTIaanDobQn/r4eqUU1mS+nkPlQ1K2DUM+s1562ggiRATQ5hCKHTWm0RWXf97uXN6fZkgOsf/uXfrc5Wb3/t7ZvLi7EfBuDYAWpiJwIhhLjJacB1t1mtn3zy0b3PnP1nf/jrf/Lf/xknzImfPn9+enL3+uaK1l1cw09/8PP9P3yJDwFGEeSAOE6nqQ4SJkWc1C8fgqDo+ZqO7IuFJnMtxBhBqjbQEPxtf/oN21OCrbv9WgwqmTUL5bofTJv9/CKnQCztUn081xbSobaGM7PBdlO45VrSCTgCvu1+cCqdffA/2eMGs3G30XAzFMz53QOw5Dh71rqoe+mBx5pD2M7diEs/F1gKEDcX/yzOTUTDA/GkIthCeAGFzkBVdDRA2iXOxDWQzM4UZ077NHWf7KC6UbMithxmjIGjqOxqX42EfMcamNNkE0g0ESfHkg7sRkOAX3fOADA7R9HPXeZ2lyfRhvaaD/5xqllqJkthPPQdYaCIcJAsICyYUtLACdUj5Arwtx1WswjsTP/K7IaG0Rogl/8u59VcPG/vaSQBjuNm6MKp5hyxVEkiMBFJbec4DsOzZ8+6rnvrrbd++pOf9xgRUXIpYvGvmIBHgBkgSzjZ+4PsczhWvn4UP68QYvYrEZ2dnXVd9+TJE9XnGqgKHlSRu6WS35tAS+EGc/nTDC5VgTuMg587TK6x0oRjAT84jkBEENEkf9Lc6eZFhFG5Q12K6s0sviFCCkFcTmDR9ISJSy8TkdIoTts22t7nBay/wDnUqJbbce24a154fV3OeRi0yG01pkOFedKIpPg4JKW0Xq/X6/VqtQJXe+lEZbWNaZLeIpJzCRvEDolIXcA+YZVd91HbVpZCw2Dm2uzEPuuDaZhqDj0xGBl76Qdzse9vG4bBJmW3QbW4oDKIRh1FhLOoBWViVkEKnctGRiSYKr9U5PZxapQSQsjDpOR7D5HtjyIzrxxixxlEgMPkRoRj4lSvw26wBvuEIlXsj+MBERnKnjIMe52OJuJqHa8PhRER1loec8oAAGEc+bDfH66vr1+8eJFS6vt1jHEYhtBjCCHEqC4GqsGkcSwZcwzCPB3ACKiFzmU1A1Ko1p15Ewwe2we9vAInOZuNxs69FEHl9L7v79177fEHH8Bc4illEmkt0KzVU1g0pxWXxMfOj6NLw8yBZoLLeIfnPtOJfxUUK/+VSSED5un8SoNY286aQUgukGqImEkKnBbV+M1n1nmuA7djHd3ODcVmgyV3MKi9woimuQAg8xTirwqICrLgZQQgIKGuh9yiE8tCLTaMG0LE6T1+LrZy1mZqtsa+q019r1TaNPw026E
ZDy7SNfP3mHChRS2WJ1/P4TJJXvHfT3QPAXHWqr6Mo+fPcJF9AYpnousx79MmbiGnZz85/PTOu/fuP1ifbmBMElIAJBRAkpSHlDsKfThDoZubfbfthMezN7bf/v1f+49//N3LFy+3d++NKWOg1Xr94gXwHi4+edZ/uU+JGTlgBM7MTAE417JlUi+aR2DxH4smfMsMJ3Z5lrPl8KiThQYgNfjQoA6c7b18l3qALKTmVxDn13LM5jZ2Gae2B2iTpEZg4TEDyQ+7ZMCj8/XfN+CxC835e3A+gg2yfCMUIp9+MvljFkiD1dsw4+e7vN8o2ZN6I4XtdexiBR7shuvdrCe0m9d5Ceo02pzp7CIXDbb1bYZqmB0XokzmwTRbnUlauip2dKkT/lk7+bexBPxqeuQY8OBqCtzEsaGBSqzBJCo2ImguqZpvGlry7/JvMXhCCHYaGADEMJ6frhkodgH2rI9lIK0bzGq8CiBqR87AC76AY8JhUiCkhVPmRnUzwmw7uMW1N7+O2L1G3p6SDSf2rwBByZdDwomMhzF98MEHzPzFL37xX/7xv+bAWsNTAq21K0M7fSzz/f/LdbtEOm4WmhP94cOHfd+///77wzBst9uUUaoKWEcGmDKB2zd67mi+fwW0OIWJ0qrr+76/3t0gIiEJIosACODMO7aUS+BWZ36DLtNMwyOicZhiBZqiBpkTZ3QFDnUukpgDM9RwDUDp1NJ1UZM/G0+cZyIPIdEkHExWE1FKI9fcdQDQMrau6zCXQTAiYYSasWnpSMMwaBbDyDkJs4wAwPUwM0YEJgbqgio8JZc01yI69WJYwEphSykhkjcCJ3EXJ2eW/Yo1xQaglWNeQW/kEri8EhPCuZ7/JFVx11dY4oNJ8kaYew2EmVPOlubjl4PnsWtL5AYTlS7wgIjMoKHUuh/FOshkeEspAwGjcRHJSYimVij1zll6CDNn8WSJiEWuaoQwlxjyaMcGQsV5jL0naWaGeam2iWhESintdrv9fq9BsBBRRsR6LErOOaWMyGPOklkPM8QaESWCrgvMKefJwAYWDLqIbO1/PLV428fWl5mHpPSAjLXmX4r5h4haIiBV83/48OGPu25ICUsm10R4y23U6HBaSrcL41xrMqHk9TqPPXOU2086QgSQECjGQO4gGp6y6idCl7pxahaQVcc2YDVEHKtp7qWYMVIVHIxzDcCLGM9gxocGT/HzuTt1R/eGoh+B8/SlR2sTkcBqA+hC8jxuYHB6DEj1kNli+DXzM/JDeYNhIn22GvdCCuXPuda7pJ6josSTi8wrymSuXYGzaT3qYEGXExL0O0HJUz9ZsMIMABFGFK1qYOYx3azWIe9HAOwQPv67Fz88+8k3f+/XGFCQx2EUyUSUxiQZINJmux1uroZxf//8ZHd9nSC//fW3L18O3/m3fychw4Byk3OGDjEGefHR5QN+HRg4SegqBa5iEs7MUo4YyirbmJmRAZZtMCZ+M3Jtvl+u5lHS9QM2v97WxcvSLSwmb+Pb2vlVs2W17U2cQrkE1Z8z5mXZUbCXE2xmdOROmT3V0E8LjPMiG1nqn81WZwKEXOoUVIeLyZbm5iWc6C4/vruh7R2nH5rIGLg1Nf5q2BDmqltdl7Zox0/n1QvhkDZZ12YSU83nsaU30jKHtAOjUJHp8R5+njdlWYZY/TRl3kwCXf6/n6CXP43Y8b+ad8behViODIW59NNkP8uLsps9bfu3NOveYNXWN9eDCnUum647O9+C4EcdCiQKPSFxMRuU4AWdke+BMYCPvrE84pZVnIDF+eZylBiwNQyOXH4dm+8bCNG5WZegGskppT1+/PjZxcuHDx/2seNaMWWJZ3Ss6qmB3I9vwy7Bu+1aCpMy5i0vjTHmYVxvVm+88QYAqEHbgOFhM0K/Dfn+Kb8K/n6RcuYaIuoBfUpmqlQQESFlO99C8b+oCfSE7YFciGVqbNic87pb931vHsY0JpZC3vplKXJzrGFAhkhes5eFqrCUWsZHXruT6jhTLVwzPw+HMYSw2WwglbS3kuKOUUQYZL1e6yDFu40ANepYgQ225YWgDZBL9bLIVFtBOZv0NkoGAKJ2XvprCK1EwnlCDTghIy5l18tGrP04bI1MHAmKeeqxtnXwCPR4No7za62P5zSLcNouoAa/F3QMQjATJqO+XYSIDruS8afrgrUhTdP3CBY8K2oThoIflf9QxMiEw9h3vg5LUplCwVfpDJoswS0AgkBAnU6n2w4AZDsfEkSQFWXCzLXelTBGJApBRIZh6Ls11UK2YRhDN5bGOYSJM4gUbztIH7vVaqVvL/a5zHR7i6N4DFhFRvEbxljuHLT2ksS6QwEAQNd14k9MyQAE9+7d2263N/u9qBXdHjUJaiJ62jM7xUMIterBzFQjoaOZaBYqh8UmEo0VEVHLRo3sRASgPNxYREsq8a56rI7zpbqDziC8baiGoD0JylwVtu+Xd1oNjEeHOPuqeYV3gHkKNuXGo09clz+DWb+nMB0E6Rn7KPYAQN1vtk7lKdc82o+AiFQliIEHbk81SJbrZZg3tHh90SC0L2W+Afib/Q3M6hRU9ubaUqT6FOqriUgPz90fLu/05yll4nDSnV88e/nzv31877X7dz53xtsx4SHF3K83kAkyCfZyM8ogZyfnl5dX6361OTm5+PTy13//159dXP7dX374xtmDm6sXtIO7Z3c+fXJx8dH1A36w6VeUETKICOcc6hk4BQxCScXZxczsuNRQJDXVzaPXLhvNU6DcopkZ3pYk7V/a8FHzRnB07hcIFgZ88150mlbxvFChNJ9p8PdVvzx53Haz//co0tBtwx5Fnl/8v1wyFwrpGtrhF7EYzFfBf+9lTvO951/79xWv8CPg4vzJCQkMPE1KtJmR1tbrIiv/104rEzCeAJbeItMqPEI8AhuyAVdx2shGcbWF4qs9K6nYzbav6CFRhjRT/nzOjwegITm/lMtLuHS69TYe8xHSgjm9GUkYYN7gadbLwFaBbPt9iHJ6sgIMXUSRDCAqpMchi/Yg0dNOa2/8o5wCC/pxYEysjW6/aB6HhSvwNkpeXkumW0LoCax8CTPjsAhD4SQQQnj58uUnn3zy+uuv37t379nzF2VN5+l8R18Ex371U1v+e9ukYIbGX3A/ESWRzWbz8OHDy8vLp0+fVpV9sqKWPP4KfvePNBPx309j1jJaEbECSBYW1LRe0r5T6Jbj1TKtARIRwR1Qrpy7Xq/7bs31EAIjA2HJNS6ThLswVarbxJTxfd2vnxfXrtQmlHT8zMkwQDRLPNElDu709hBCxF5EGArHhRD02Keu7x3zTkf/OVLJAFrqnIhoDwOzglHxX1ukGELYpX1iPXTX+wWYGYN1ezY0IotwFkSgxWns3vsmC9eGF60FJwHtQe2a0/e9kYRhyeSwplaCbhX1cMJxHAOVzLKqMJS0OK3dbd2mCDFEAyO7Qy/ywGoK0jxoudy/wOmWtqzMSAHAZeoRkYg2Yi3an65AzhlYIIOAluoBqEXndGxw0s9ktp6SpVgqtyWQIOrKTJkjhb5br1b7vu8zSEqJQQ
J11EVEpEpXMUYQSnlARD2XRL0kugQ2QQLEepIFlpBG28sAXO2658Gcc0AsZuDiJ5Gab1gmQmdnZ2dnZ88uLvy5TThT/9oluG3Xdkw3axkox2SUl7fN+kZyJ0oxs7gu5waBH8VyYcF5H7EaUV4OKkyZC8sZ42H19FM9BdIbJEepEOqqLOfWPGuk7NfYHpdFAqcNaAFPs2xz9S357200cp5vWwYjbj+OPuUZzJDjwQMvW/PkjvUCt5k7uxzXUFtQwlRy3Tp9DRiap7DbKjfiG+b86YnP0IIsQCCCpUge3WZmJhaWbs0558Sc+bBZncoYbvbDVk7whr//Zz/85uarq0cRV5mHkTEirDkTY6Dx0BNhwhg2KcvVIcEmXvLl137zy08/+eTZk09pBZ2sxjGTwMfvvXh7vz/pT8a9HnYEI7MkyZmhRvBNzAgySuDSffh4Dq3fCBud25OcJyQ/jl8yQ6PRm1Fgs38Yx3lSsctWVj8HCs09R4UFVqte/VZl81AT8ZURBgN+yZ4GoUcIwsRuxuD+EU/2/oNB7gG+Ddv+A8yDnA2cwR0g6/nXM4hfXE2Jb17awO+/l8Wv6HTro2LXbjZgPJkdvbm5vBegwZVR+FHe9896IgFHew3wHqTmsw3Irisyuc7SIYSUhzIgOscWALNrU44iwvMaqglL4JrI1y/RvLZ+iTyRHAV4iUmDQZNKzJRF68mch74P3WrV9QEYmBNBMDGJWM4e1MxPElhuwLAgJE9dcswn8opHjoqp22YHtYZwiRl2qb+ebicAYPYgUkTgNI7jOEailNJHH330la985bOf/ezz5y+8KPD8btsJHeMmnG/cM7TYv7fMy8SpyNwGu8WA0vneuXPn3r17H3744cuXL0tnIFnV+O4sJRLxFQWhMxiOzsivlNG/VGXa6VScQUqhJhIzB5xJSz/4TOwcU5YQUaSUFIpr0GKvIyLA0t1RamB/HMcuribexGLTlG4r3NYsOW/yzMXjZaMZJwaYZpUDTFIXAHLOfSAACPVLswlz1QyhKHVJwcquJpkLbAgAOUk1BYMFGwEgVOWT6yF1GiqwwjzT1vTPMaeGB0W41JaJqENDG6epvw4FzLgV13nrcDg0dKLDMk7tJwzJUs6Fn1VdmTJZRbqYPx/0PA8nD7127V3As+Vwkt8MvxhK90tTSGDh0DSE6PfelYyIlInjlHxbIZlmUSbOrJG3Sj9JRKAGS8t7SQjbJIXqJCmAFUrjMtR2eyoiIQwppdVqtR8HfUPOuQOIXRdjDF0UkXHIzDVOiFkya9KyFsCnlLI2FoJKXRmEhbpJSBpu0Sn2NE+B0XMdAerpRIySGEkIEAC6LqYkoboO1+v1a6+99vjjj/VcFpZJ/SYiQESaeYUMpV5aTtvoXLzrPSFEI2ZbNeNicIJX/4wUCkH7wKi6ZBsZDcXwIOvI5DFlosF/QETvRrIBYSFM/cbmJYunSOMiG7BRZ9FZROS82vNXTJB4RjWcYk079o4QW3u7zRtUHjbJRcn2U/azw/m+SzHMPS7MzFy7gBgDeL4yxrClpXmEx97iBY1HcrMcDXk19zTP0jyBAYACBAZQWT9RBYAAiwjVc371/lUAgNyHiLhJB4pxJXJ48cmzD372wRvr1/rXAsswYgCIPACx9JQl0OX+6vz+vZe76xeHl2995rNXzy4efP71X//9r/7Fv/ouC/RMu+f7s9W9l8/Gi6fPtm9smWEcx7CexGKu1cMTiiAAzGrllaONC5q52xS8vX2Ulm4jbI9bIza7FE6Vy0SzOPOS7T1H+NuM9vz9fh1TSuqCbR737NZcnjXsS1noprBQ4BoS8n+6KUw3+zubVGq7z1e1yULrWsJpbbU8xvyw/kv7vBzW25wew54S/GX3N2/0r7bdpcHYDCFzMdhMsBGVhrcGqqP0gFV99+KisRsbadAQuX2pqqcGE6hqk+Ium6+9yBQdjzRElOaoOiGBaYKIqKliVnvTkIe9zjtZ9MPS9+HXcUaLjjDG4dB3eLLdrtc9ETBkZk45IXTT04DlWBERmJ+05FfHDz7BLC1UpqHasx75y9FewbnNTOHYgvphPW/K3LQSEQvOYwzM8v7776/X6zfffPN73/sBT34QCESZ2/OEj4LUjN/A8OqpNZziBz56/ziOAfHevXsnJyff/+73bm5uui5oJw8p8CjBuCrxY29Et5c1s1gyFwAg1Q4LVDKtbNcuSls9srGMgNMIv3DWx4irXMxsVQkx9NZWKsSywamCMXIehqHvkhoYSsNeLlnAIGfL0QihdEtuJZunfKjal81XRLRGzKwmZmY8IgkRUWprtzIUAmg8CaYUOE6lUhEA0qg0iCF0MUZhJEqI5TgE3w2VS2n9iO60ABvTaquOCs9leIBAfLCIahGyTaRZxwxsRqAZXXqbj53m2trHiyZySWTa7Keu9dTr0c6d85ATkS/WR5dN1uMKnCT0PsSGtirlz9zNYLaae4RdxYErtWAU7TmEiDgMSUQ8ESOV85uWZC01Y0vqmUZcE25X642IxBgz83a7TdcsgrEveewW+cxJMow5Z/2GKIzM2seViIbxkFKSxCKiEU8R4ZxRBEJkFybxHKFweoOQiDSFFQDYrA1CEB7HUQl+ZBbSbBRAxIcPH27feefm5oaIxEWSpLBhu9EbchqdHxwD+vsBp2x/25R5HhX3A2pQtQzHNc/TK/3mVygEhyWJGV2gCV3+ocxFOdF0TLyRvp+Y/Su376P11VPKtTiF7CiCZC6djQfSsfMJlcFMeNm/XD3fXm1qboPqt/HQetQZ7TY36AcTVeZ4ICKGiT+hOc9jSvhH071CCFb5beNwreEWZxUbYA3AXiI0iyJuI0Tnly1zV0gQRaJgRiRAs2913fVOsycBQYZhWNP2dHV6PewOu0N/0v3djz7p7ue7q/MDDSFHwkEy5ogpHRgQAj759PnJ3XMK3afPnn77619/+cnTz7718PLrz//633yYD7s7p6+9/HTYrk6fP794+PobnHJKqaN1CCFBBsI8jJk5C0/9zo5pDpXaZ+2zPC1ZYA2c0t/IL/+U4bmhfKs19VqyiORKct6M4dp8yC8HFj/WLGvcVs37a/zCpTERtnrhL3PZCHhMU1leS+zBMb5GpwD5y9959I1Gq4Vl5h0CvLzLtd+Jp3nftt7TeQhT7cQC1NZgICI+ZpYY/DZ9cdfSVmlEoseVzCuyDD9Kh3lqCzEBIFNa1Cz4TPOESazbP83zi/SyUjqcS2/bXUyqmHqnbeKoNrWyLC+D3NOkx6Qn6aqdVzlTsWRrjTgLe7oo1IRSmRvwthY25WalcL4X+IXODCGEzXa1WnUxgibe5ZwpREQsiUECACCMMgdmCRg4QiqLlWcGoSdj//gvZNhGa3EPhmYErAZ/syKeXAFa8Zhz7rs+1H7XzPzpp5/mnO/du4eI4BSREEJeAMNQz4ufT81zSgMn3ML4t8233EzhyN0AKaWuX52envZ9/+mnnw7D0PfbnLOJhAoDTVmXMP10GxjgSM7fuWRt+77KmQAAuaxCQAxyuynYDP4KkLAECafEP67naUFh1
cIX+iW7w2Bk0TkGXGqcOD2yMRv8CiK5vaxe/gblzb7vdUfTE49Kr+P6YJifrcVojEmCI2I1qFg4ZeWYnES3gmLl1XaAqoh7MKTWj9mkvEzI7iwKj2Rr928qlt4TZIYiw9h6vT7K1zF2UGumbO6IqCW44noHNOJdRBAlzFtegROVZV0YZn/68wmIjPBMtgfuDSdFQ04p54xYepwYZanoEmH/xorRiSzn1CsyWRCsUbK4yMaqoyEi+kNE/QTFaQj6JWGkgLY6emQlAIhkkeh1M6yblMGKEEQGg3YYBtsIFF2cgSDLIjXPbvDblgFMRNKceAcBCBEjVqVrHEehkHPW1s13797dbDZFu4OpVrC+d2ZKGAw439nL1rlIBEV3Ap8rDJwdom58rVOI52ev5ZxTYoDQ9+UAOj0KwxbXnBbDMACJgGAgQWAQisXe0GJWA9FoJVTmAafjKncp9GbE11mQTn/KBS/lxFVuCqSUzUOMGEQ001WwpjLqtGNHIdRTQUQIKYSAjGbfm7/Hwgi2xhMXVTeSWVa59u6zxfDigLooauOhurUAAxHMiownGsrc911J6U6ZAbuuYynp0UYEkzJE1BNpOyZVwiyua+Orr8igypIFBQiyZLPtOXMIAbDkZ7EG9wIFCpzYIsCe1k1B9EhDROachBGQAgKgoKihejgciivRulkAsAjle0QEMSa8wRWu+xwYV3wyPrl4/jd0Lz08v9fnNcIWDzAOcPWSrl7b3F3v44O8Ptn11y9ffOkrX/xvfvufvH/x7v/1v/tn3/jHX3mRLn7y729O80XPfDrGF3+zO3vz22OAHQ57HJmGLma8PpzS+kqRFTB3xDkFlE2UJKReYUQwf4x23zVW9ASMCDEGgKn/r2mFRGROdGZWX1rOxqWaTA/6opyT7T22dojAY+bkGkmJgEhAgnrAFrAgAIquHmYx8xtsExURlXHee6LdICcHRIAOA5IwZ8YkCCIZimoXVJYJoYxKM1BfofPVMW03CpP9gs59ACDCgQISCksumxUgYuEOBHMtQe07pC/yjlsv5jCQ0hIiaqdHbR6NBRoUUcdjGScd/LGHmFJxLmomFZUjqiaPY64pQ/pORCzuy6LoTP0AEFGEdPHRGTZS9TBjH92NCl/XYx64ntCF86YF9pR+oK5XYkJr2SLCmXOxnEjfrf/T7xiEAYkodp35L7yeUQkgxBCG/SElfXVg5mEo01cPtJgbGy1gaJ6dyTOqZS0AelpU0UL0XYFWZntw6d+NRF0X+2k/AyEszn7OWiZKIpzrupjGJiLM2kFeIRFrAmSj6Z0Wq6wkOvnasKj7jPVIBkQUZo04BIwRJIIEYcwpdtuf/uzD+xfDeJM6DIElpWFLASSxIAAJEOqRDAACFDTpgGuKrJ665J04AuLb1qtn3PZyKk0tGIAq1XnzBOcH0Bv8zWf7l9Nkk0sDiXM1Gu0llVdFkmPJ+wAQyDlnwJDzyBQQ4ocffHy4Hj77+bfWZ+txHA+HAwAFQB6HGDsRYS6GYeE+RAalX2VRsM0FwHho1pFCBDLrrzP+qmrezNQv+GYpepUGl4Ehi0je9D3n4e03P9uF+LOfvwOEKUPs1sl3Q3Wf86K/br30C+8CFpmaY03UWKaWQkQhCsAYiHDE/fXu/N7ddb++uroSQQqBMzDmEILUTDHwQ8/cH5VlKj3vD6PBJAAgGRFC3wEhxXBydiqQWdJqdZZzDn2/HzJQR12QnDEzc2JOFCBERMoE2EVSLQcFY4xDPgAwkRoS2rifc973fR9CHwIyp3EcBViAAYEFpkbQhKGLDCLDmHM+HHYMSLEPq/Hy8vL+qmPBzdkZaDKWQACMEFBItOUJoijmdTMFEEJBACiqZhpzPVI4lgXUNo6IDDlrbzZc5Xos1lg33AwCAZP+GQOFkIEzMxJqOxMVqFoepepxzQXoiIgw6EnuCIEiUjHqjGYhC3Zdh3ikSKGTEWNYrVYvXl5B5n67QqDMkBm77mS32zHrEhBCTGOKBJAZifTA2zyknDPn3Pe9xgBiF4mE08DMDGGFSCgETEQY7DQgIAqZRUQCdCEEoWpjEI3jQUAQQRCYmSEXJR9LBE/pXURSzkgCAajGvQUkc+0sqIyBmIU1J1l5IWgdHxWzOScBEBYgIQYgioja0UCtAKyWqbdbMIRO+wDZOdiqAENEAEgpXXz09GZ3mfJhtVpnHmKMmQ85Y9d1CFGEAbq+75A74Syc82Fk5i7E1Wq13wsAQEBElECjsKrNSCjDoEqdMmDXxa6LRJTHBCScGSEQESALZM4c+pBSykkgAyKJDHoDh5BJqxZDArzaj10fuo5iv7179/Wf/vSd9brHEhiEEJAZMx/SWM8CCcUG3h8yIiGglBNisqqdMfSM7N18xSEbgJkBQVAERUAYWKemTQpCCJB13TmGGI1kmw3GvrdfbcFNQok7oKaRVv4RE45+2za/vteEzDL0Ln845pluNkWazjUuYTF0pycbjgy2Bp6lu0VfzW10FNG1Umjw02Bv2hKg5hNX7vJ3Zlex7aemg3gk5NqnIddGXoZzZ5zM3ouIFMmwar96hNirCwFJmyppCGlm6pcGqhMCnf3cRCQK3nJ5tqlROd2eXDy5eOenP3/4pUenb5zzIQ/p0G371cn6+nrXwcnV1ZVw9+DuvW99/dfun57/7fef3F2dPvv4xWdfe+Py4Tsvf5rPz7YvX950V4ebmxs8XWsyd+KMWZhhP+4Ph0NKqSs56IzMjElkFkkwzHuCwZk/ZkYqXlX1KXNYNeBG0W+I1kja7KIQjni4TRMyYvDfwJwajVabERSYrusKteDUdpJFQKa19k/5bqj+pU4fakHFeUa7kb1Hr91vyGnkxnIhCjPCjIX9S5tnwRGnR6O5Qo27w6xAXJqRm2cbweKdo/aUeXDAkVChf3vHPGTkidBjj3DGv0Z1Dc7tRTY7oysvGP0b9csGeIOlFQL11XisKzosIjYGCfPMme1LZexLcX56rtVNhrflQjTYW36QRYrv9KxHmswozejKxiciHuHli6tA3ThqSEHTQtGtNoiI1aHBLZen89kbbwn1zKjQDdIQpH3p52jfLIX20QE9ywQ7KLJ4xxCqrQaO0URkHMeLi4sHDx6cnZ09ffqUiAQqUbnwMjj820ubz4bzZhWaG/wgy5Ryh14dpRCuzmC32z148OD+/fvPnj374IMPlqc9/ZLXUYL8BXci2BQSNNWwtTIN7J7jA972FqrFBXoDV4c4WdeWegpcdcdMe4FCoo7+0lhSynYWQrC2JbYonpH1WasctPnmnG0KjUg0kvDXMAwkNiAzcxdjqAcpl6+kbKMskHjKfagOVtPZgJkzTFnrAJDnOV8KjI5mtXxe6QouE0dNbEMVABjJKPBEpXtKzhlg5sEUEYAZpxehzQKAOQsz2Oow55SGEDB2EAjG8TCM+z7ELnD1kkyZ/OpttJgnVupCF7xqKEStLP1Wi8rMX4nYtng0c8Lj1jHyjHiMOM2B4gWviHRdxyBQD6/HekRZ5ix13dnlW2GlqEZgGq3a9kG16QsAXF9fX1xc7HY7hYuI
tttTjIGIchLBUWrbnv1+b3Cu12v1ezZSyH+2/jLgZJHORTUoWwiFTQWLTqU4T4UmJk0lbKBhgBgjdt16ve77PoRQWzLOzoozPhJGweyValtuqueTsasYMoC9tkM1QmCeaP1G7bic83QqsWdsz7qV88sOLa7mwSsZYXF8QiGj6skDp8ewc5+bUVuVLWlebeQocwtNL57OG23tIq4p70ajXk6hsz8bd2AjJhoSqXiYTdZP2Q9ia+NP6TA8IBdVwmwDQ2ZjEoOT47ao9jnnKQyCLo3Wk7hBCE4P8CtSGABm1qOfFCwucaEArwHI3EFg60VEOZcVzCihCHQikpP15uLZ9YfvfpRjfnMV1ne3MOSRD/s8nvabr33t1z57cv8k4ecePfjKr3zxyU/ef/Hup3/w9d9598OPHndP4Fn/w+c/3z1PuZfrF/Ds+YvzdU+hS5JyEqQszFBPLiIiEhJEgNZEt4mjO8TPaLVS1PEEM5OSVFPm/JjLyxACzp6Hua7frL6nOv9v8xaTAjK/uB54WsSTI1FLD6tSrMpHACT0w3rgl8TQgOqQNhvBM12MR8pfjUq9HCiLRbeidAme/WssA9UssTZr/lcR8Vn7sJAYtmp+sg2/2Ky9EMjW93yh7+p1VH6CswGaR44SgNGzVyD0J2NwlS0WOfTAO7HfMsVyXo1Y8KIGZw6UyfL0uqkFgmyyJiWacWarP6+phgWx2WhLg6EgAY87Mvx0ZktDdHOz77qbLBwojgRq7eRsb2QAEsg1OHF0+NmLllj9hddEDPOafE/VzSMyVwv8OMuRly9CqWUwczvQ7j8cDu8//vC3fvvb9+/ff/z4cQgBoG49C8x7qJbfNywPx9jKLwo4LvCYnIugSbIpId29e/e111774IMPnjx5sl6vc2JEFD6+EJ625wAf54ujIwCAT4FTok1S5DAiQluo2Jq+DU0243uWaZ7lmvKgsRRTGc2dbc96g5CIEKdzTbl0dpm1pDIulmIQcrOmBoaXCf4ej9txHDsKJhZEhJyCWxs51GnWVFJEhBK4It3K9D2eNgow3AKQXSP+JcubohxCMINQTU3E6WBqvZgZp5HZFwGKCGLJxkfnpyOMzDwckuaIxi6INqPicb87xI66fgUyCA8x8M3uAmBLqDZqFsGckx2SriBoJFXH7/oQaKot9FsWIlIJfhbJb8aDXnl+ZjURWHK+Zzp2DWleTZyepPPUgrWgInMKRc2YyjKJCISb9YJpQ5kcHxoeRMTduL+5uXn33Xfff/+D3TCU3kUYUkooyBkQGQMBEGIgiiJJRJTaN5uNagLkDnw2lBUeKSrjQjqx6DFmnuxhLqYQJ2eNsUzO2ThDOXSz2axWq5xHzgKoPtxsJUXGznogDVIwFkBXieatdEO+zsJXlxgYzWJRzVE6ciq6/eznYMjSNv009/8dpYz65axIAOdRL5gbWgZ0lXEswoWaCTWDQIkVACxiV+XaZFaVxxGWUNkbjXVNUPrb7DM5D5yh9Sjde2x4E1Rk6rQhjfoiIjJJWxsnuHbtfkY8j3+isyj8Sz3+fVKQn1cjxE3xAp4m4pHmDRU/ggk7LzWMfhqUighDFmHgCKDdumo7VsBt3+3S+Ml7n4YuvvUrb3dxNd4Mcb2+er6ThL/1rW/3Q1oNKV1d9wf5R9/4ndff/Mxf4Hee/uTi0YNH+6+G//Cvv3vn0Z2b3YunFxfbB/dx1TGnDBAYgWJHvdbWE0bEcnQY1a4yONctsOZei1M9seShtdrnEkWewpfn+cAxYXd0TOMLf3NzDzrSskdgvtz2ee4odbLeBSo8z0rJ1p7GNFQcfYtxesPdjePKYyBG8sDDQp40SPDvbb5Z0jbM+ZFdQMxvfv4VVGvKGyQ3Bp4sVDGYSwBxlbp+p/Fr5FfwqCQBAI1IezRSjT83SNbP2fWj8vi0Z62WgErFyEw+1A8zS2O2CjI7RPEoDsF5ATQr3RBylFz9s36ZDPLGY+ovT3INJsX5nv399tH3kbTpRArBHTsUKaYx58TC9awL0R1Bn+VadUaaVQvQcuhRgGFBY0duu8WW84+/gi/0S5xbcR5LR4lQnAEjlfzQSchCxoTjOL7//vt/+Ef/8MGDB8MwrFYrsDM8XeH0HO1TOkkDsNcomu8bOL3Tc4lJEdFYDQuTQM1hl9PT07t3756enn7vez/QQUZOy01tie3mS4+fV9w5/aT3F/dj0aYck852YZzzhUkMIrJaIHBaxJI8LMBrLUmoXuAOMQMnGKFmlxFR3/XMMI6HnLNgESYhYBN3sh2/0umRy5ha+ddX8XmYWURdKYioB8cBltq/VE9HwEAxRuoiVn0sjZnJiqI1OwwQJ/IwAHxXfP+9T7DyAm2cNdssa8FcVCxxARIiQgyBrBJKrFYTiovfpRdXjPXYpZRSSvvD4fT0NIQwjgfEfHLaHQ7702347JuvdZFyHoHThx/mjz7W85Ojmb5FXQI9A6Mzw5UCaIy34tZax1mZrG4KnFLSg4JoOlyg3c11Xz4qUXM9vtLoqmFAI84Zn7IASNlFSYhoFEaeAtoFqxPxzAQFESFO0R2qNV+73f7x44/+7u9+8uTJk+3peQx9Thko73aH0PFqter7deiiUn5Kqe97zeRSR4lSmp3VqVOIcYqTVWQSui7ZAKAJyQIla8+WvsAGylnqkkBE1DFZSkJlSimlMAzDOoTtdrtara6vB2YOUddo4UCBUHs0lhAtEZGVMmpFSQXRaztSQfJBYBGJMXqPMNSi4mik4HnV/7mADA1T9u+RfWiusTV7mMk7MySWhNXQ4nx5pst8bO4bs4ynQxShGlo43wj968R15/NQ6cWuf7F/I7nNbwmDv20pmKC6e+0pcRagh81jw2t7hkzjEHtLoxT6dZzDMHuLLBoPeo6FBfPzvNqQs9ViFdte3M0TJWAGAEaKGAKCEHDmO9s7Pe8vrq+evPfpdn1ydv90tep3L6WX1Q//5gf/csA7iN/6ypc3d+/cXW0Q8eXPPr35+GqLJ49e31A8+8sf/ujF7gUAvHh59XDMEfsxa2lgiDHIKCKYk5TCrZwjilabNDRp+IQa+bRVJiKZe7CM1HXvLGxf3W+esJfcZDWEnvhlbmn4dfH/NmziiQGqR6rh6JZtfTtEnNXP+Oso5Ec56OizhkNwvOwvH+o5Oh1bF8U/y6wp1FFI7EsRqW5RtAVllzhgXMPzzN6jjAPzxG/PpLgQUzV1ZDKH9M/s8rv8g+y+95IkH+tiumThozd41FVn/ywibU95EaR/2rlSdUSb3az7pfHLXBOa1Kacp/5VlqP1issT/4xc517Poz/5y7zgflFefam1bPEKhX9MjAEYMGcZhsSi2jZPqd3IwmjeYsAjDiC4xYoD+IX246uuRhSX8Rr2lBkx2M0N4zjB4vR+gVytSqgIiaFyYuYPP/yQmd98801r0eRJaAmbCRnPv0afcMzRc5vNtmT8xRtnsyGiz372s6enpz//+c81LIBFZzrehOYVL/KXUWAD+RF6q91W1B4oIY7Exew/5um7bXCYrdeR77HGBr0hp/LH+NrLN7UQlItjjIi
DGoSecpqr8khFggBM6QMza9A+LBUnIhIKiNUetHy2MXOWlPI4ah2gpoMKEY5j0uZVWLRHE+OlKltqXNsU9OWKiLOCDDyTWji580yOVcTCpLUCAGKwHjpqEGI1gUIIAJMr2R5ZhxPmNKQxpWG73QIAolAQGHMX89nZ6jNv3D87WYOMKQ99P15dXxXVIvTsusqnlLquCyEUjyqUkICaH4qZOtlpubketXo4HMRpejDfC5hTCD3ONUCWlDmp4dS4dWShORuqffM2NLHDvF6vc84j5wCWoZNz5j5MteX+35RSjFOTVQ1rD8NwdXnz7OnFxcurLBhjZIbEsup6hKAhQSIijCCUk3BOfeyVNvq+twMnmnQSvxebiaUWqW1PtoeGWCwLgw0AtBBSI4TsDnQ0lOZ64O16s9lut5vN5vr62sgmBMw5g5BazggBaJZmZczrSdrg9HgzKWohR1tQP6CtXfTuAS9f/Ms8QftzzzzFeE+eB1qOeeUNeqwWiGkqnhqMUj1x2Dd12eye2Ys8rcMxieApwH/vZ401fghOvpTcdJdj3dDukVdg+4qyKiKqYDVqsXeEWFTH8NwEb6WGnv3ye870Bhu6qyEavWwdDclye19EcMFJBUazO4Jrzeq9kswshARaso01M01IEJgCymm3Sat0c7P/5J3H49X9h2884iGf3Dk/XO0/fvz49LOvb7fr3c3V1dND3qf15uzLj774+v23//R7330xfvTt3/vmn/+rvwCCq5vd/jCeyCYxZzU3RcYhp5TUUYnQITCiBAxJJieZUUien20wJ8WGZMDw4E07rNEYmV92vxls5j7kKa1iUqkN282HBoDl+P4bTw9WxKUZ8LjYL6X6KTw9mIg0kIyoGvCarcLWvSnaMXz6yJt/RfNGOapjLVbBc58Rv6FiuaY2fb/KsJADzVvse5z7jBpU2EupZhksp+93iOZ7z7/LKSxXBKp0akwyD2pjEPpF9HNsPBr6n/IrlxvQOTt0fRvI87zTaeOHbihkQp0jhiW6lkhubvZ05YlhQt0ki9H7QMzjE+vxAAV7ApBYBEPoBBGBQhchl+bixVTCaSe6zcBbEtir6XkO6uyRJeqODljukenPRp40lFNQVP/WGzT6UFgSFI2UhAMii3z66acvX7783Oc+d3p6utvtdGPKOcti1xCnXR2d+FGQwJ3Xat/w3KUCAIsBUSeuKKQ6zpe+9CUAeO+999QnuOrXh8MBjtVsN+PPv58MV0QEqFsYNSII9X8Zirmj30liZtZIBThiaNxxXg7A3F+Dc3VI86emDaAiR5suztM72fMgl/Z4ysj5cDgcDof1aquqrTKvP5zTvYF9tMFjjOtxI400biZF7lL8a6mWIEgNQJnDRS0+IeScx3EcU7J4pggi2hlRJc6ZcNpGg2t4y/PEtOC6r5tASynFrvPTqXir8LeHxBCIOddmpWhE5KsKbfo7GhEx51FDxiQQEPuuf7m/jEQhCEEWHobDDSBvN92dO3cOh0POmVDTOorvMtZLCY+NvSj6l3LNdxURrS9NKQ3DQXuAUS1vme9iLDIp2MyspzDquuR6bOCSYWEuZAqCls0XBDTzttAhQADQxFFmZhIuhdpOJQDIwgSox/vklLXz5TAM1/vdyHmzOen7NQAdxiFQt+o3+/2ekRBHEYwMXdcrp+z3exHpum673Wo/BQA4HA4eCcYm+mX1kpRLQwL2ZyiHUExBFIeH8l+sqch1pyg7PiIeDoe+71erld+s1ZxiZgSCyaeNBhK4vdvkpKdMWwgT7F4XNSVQCQCrmZNSmh2ycXQfhblgAiER4EIQSNVhLIxlGwH/+Oz0p8ZFBM5h30zP1kPc5XdTk01Esf40073QJSnRIkDhfwXXw8Zgw7lGbk+F2noUnK5j8MDcWWJ6lczPdYRJOJaKXm/jNYPYohjSzAiEapf6mkMTtTZBexCc3rD06NQ5zFb86GVEJk53t/HBBYXAiQxFFwnClM6u7y3GfBZed6s763Pa0+FieH541nF/763X5cXw5a994R/+/m+9+fDO+++9M768fPu1h48evBFxLXH12tnJ+xdPf/bkgy9//fPvPX/34+8/ubneH8a0ZuAMQMgMOUtgYAYQikRY/JiAkmmRPm4CvVlWw95RUeib5frb/D5kBAbOoYDO8NaHvGQx7jCEL4hNwIkwT11+uZcwo78AACYaqCz8iw0wmCi51dE9UeHCvm2cFEuiMhw2QsBCVZ4UXzE1QvLNfuy96qc3yWDAeM9xMzVwCeTLGRlabJWNN8EZGD6bbvnSBm8AmoU3yQGDpNHJDACzvmwKOo43yBsLs0GgtwaPCgE/cb8KNnF05qg6qsWJYnR57A09AwC6JjrgWKZ5owcGjlGjJyqpLkIiwqkGbDZOjBGAiYhgFpyPcXVzczMkDt2q61aqSAWK5vgtN8/dKMurQeYrpOsrpgkAvqmbR8uSFGFuaTSr1kgkRwkCAFgn07CVW1bKwhcXF5988smjR4/Ozs5ubm6YWVUNsRPqsHn1VKPbgG0irkGaL3nwjGls2CBqkirlzvLl6enpW2+99f777z99+tQkMxGlW+iqAW82ctUZlmL2yHIoKrSLP7DmYeoh2lAFuEg9x5LFNHM/OMxbjC6B9KibRF/ND/daiukGFYfMtU/Jfr9frw59v9YzRZexZZOWziacqYsq3xrSIiJxgtfPy7iSiEhNjmE0cW2O+K7rGGEcx2EYVMvWyA9M2+uUuoWT83PSc/R1jSPMC0lL6MCZTj8ZhAXDc4EpgjkNUIhzOhusUgg0mw4AaLKiro/Fb2OMZyfngCOPfH11RbAex6ELcLI+2W6nA7H9vqbndoQQNAeZLFsbJnmrHgBFkba+VJtZQ6wwT5OprxDH4ygimuUIdcdsNCVbUIsE2lAyc+xOJhagEOIwDNaCa8iJqpWlcTMA0DPZm31N21lryWu1YRiA1uv1fr8fExPFvu9zliQQcs6JM3HNGhYAGscBADabVd+vtbUPAA3D4DJ6dHMsLX+6SHp4vdusi/mq4IUQAkWtjVwyJsw3I1hsZOM4dl232Zx0XZfSQETWmdlzlhfvXhv0VGHr7umQKyVYfiwiFgaf61H6IfpFbWbCx0IWdpE7jqOx9ExCeVFiDzbQq1pGtSsUzhUUP5oXKDaaAYw1g9wgKR6HesFCmPqRvYLuoaW5AufpwE/WBve6jq6EjuaLRKcxWfSQSj9ZE2Q4v3QWypNcXfJQtBZ3nEBdrOppI2ApaoT+DwQQ0jiSKygSEQFhmTROQ7VfL5mr7zBXQBFRBZNZ2lxbj9teQiXyiVSa50oltoQiESJSyIFhvB5eDs/lSUj8a9/62u9+67e6EN95953v/+1f/Orbn+/vnr087P7iz/708vrwe//FP/72t77xg49+8s7Tx1/52uc+/uETlRdGbswMuWR9FDyzQOYECXjottTUherCTR5Kx2+3/SsiboOf2ngsmdmGyrVnrOcgIvL+FNsJZJECaiQnMuMF/4qj11RbUlUlfalGfhw3VQce52bw+n1b1uVfigvB5zcJc3EBACIY0fr7Tf
LYdqUUHmJ7Jy6sEXtQf2eRlDOJRJejDrm0syyiJ5ZciWzdX+b0b4xvb5eiYbQquI5j3Vm96QtzIxudUG14bVpinjV0NVJpHHngSNFjHp07TFwzW6O6OK+ZNKpTekYnqPWeGHqDU6WHF4DinFB+Oiaagjsb139wAM82ES8Y5fY0OX+n/mtbuDFRgafaKp5JpOTqFH+mZ1iK8ZDSMIwnJ30I8Wa3x8whBFzUEpsR9YrLL25FzvGHFNTlfMvhCm6QBoyjVyM3/IfF5+rvKHDMEAtQO2YTMvPNfvf06dMHDx7UIhy2880reM1OPb3qdgDmr5vznRGkpR830oCIatktWyq8CGs31O985zuHw2G1WuWch8PYdV06EutqQfLfeHiOitl2LghAiKwHpCDUVmZg+6kACyCX4lx9uhUCLiXbfwkA7Hq6EBHUFEqeRzlMibSmMsMweGiJSE2Fvl/XoMHMMYRO54HJoakG2jQdW2+YS2MPPLsrxKKEQGY1VNQkUPBijF2xFoWZx3EEDIioyNBvRBCxlLRpsEXbNpqI8xLYNJNcT36PMdoRREU6TXMBYRaualGVrzYTqCfCi4gZCZrJqbeYkDQYIsa+j3rEgjnpiGKM/WF3eHZ5vV6t+riK2Meu67vQ99cxxmEYVLMyu2W1Wuly5Kw7QkU1BtvImKfjH4oRVW3CXNsaKR/VuF8292VRmCGbiLYqUNtDDas4L4kyYjBc+hiGEtF+v9cUUGZOqRQ0ahK14o2qL9UuXe6bm5v9fm/rKECZYUx8GBIzn5+fr1aby5vr9XodKGpeaBdXiMQMzCO5DLvD4eBHJiJNItWpxRj7vo9Ujrgw+pGq3RkeiMha8lLUdPrS3FVEhKd6Kxa1qN3myLhe9ycnJ5vNRiQr3ZbDYJxvxUjXS1ePfJMY7Jp06gjDMICl/TNTCF2MhFiM6iptgpbQg+szu1RhbVDTX7PzuNiRd5445hsANG3rsSY1+U0anAmB8yaZinc7FMvjwm6uxt6U2Kmn0+jRwWJHaTl1aiZDKx7NE2BhcQBQe1VfpPO1THGY70Y85Y6DFwdSK5hNMppMRBYIk73ql7bZkEytUXFj2NDPSs2KVeVbJ8HB7gRnuRmJZ1ddjYhEYRxHdYSrrISaeu63E6glN7boRgU6Ow3HOzvHzkXBlJIgYkQAZOYAmqOCOfP1y6tuvYoSA8eYZHwxPj88/hGn3eWzX/3mFz588k7Ku2/+xrfx/PTf/vG/+eEPf/APfvv3ujUdDtf5cH398tOH904//6uvvfvR0zSMwzBQjBnGNDIMIw643w0A2MUVKNkyAFFx+detrgroCSH+JyIahsOS4NmFwT3xMzNRaerVrL6UA20Nn8oFk6OX3BEU+jpbdBvKxgdnmesNuZ6WKc4M0OKZQk5SjuNDhMM4OL5APxecewSm7+cqS/NhSb0epUZ4Oi0TfCZttEBC7zcLzbZV3ch1o+WazlRb5Cm/cwauDpRBBYg3DKRmOYbF6aChHkBs0Hrpp6jwhD25QtwldvZgdaVPjwdUtY+Zk65LoNh1wzAQUW1XDSIC4nqLVb3fOM6XBHj8+43ZgBGXJZLd4YchBI3V25f1lGRu5Vv9r03c5LZfbs8OVc5PkEDNbiAi57KZGXIw30T8XNjlqjUEaSRqH9ilvBrfjeNIsYNjV0qpps/N5OT+5hBDf9iPmy0ChnI+NU0l+x4ePJbQ6G9ooIWqYNp0bBY5Z0N6c38zvoksqR5JG4SIgKedznjQELuwxmecLlD+0e9VdKSUAhEjxBhfvnz5t3/7t3/0R3/0q7/6qz/5yU9Wq7WeYDxy3b6b9XL2XkNCflIezmahbTRfIQZzmkHdrMexW60poGQmoq985Sur1erx48ciMgzDOI5Vh2u1qyVt+C+9PuPXZbkQBljOjIgkMIqKERpTGlM6OTkRwZvDPoaIiEkTWhY6QJVjyXOH8e8wZvL+67kVlOvx6/v93nJBlY808pZzCcHpU8MwdN0QQrfZbA77Qa0yZqkiVA/9m9poqxUkzqG52+/tYAPjcfs159z3/c3NlYK3Xq8RBWoR4/765nA4BCz6XgghdDHGKISihg0ip9x1XaQggpwyJwEQDUt2Xb/uV6u+jyFQJS1y4a/gDqBer9em1ZhU9zkjtoLMnHORn+x890SkUb7KIMGkDSKuViu1r/REAUVa13VdCKvVar/fX11dXV1drlb9ZrMehtTHHqHnvH/8wbP9dbp7doYk42G4uMYQwqrfVNMXETGECFD2QWYuTltIAEBVAcuZU5pK/vb7vQqHNDJXfzdPOWWQEqdUNDd167Akrla6yUMRJsLadRPMoz2Og1q+mrdrKoeSkMNMSTnWzMJxHHMuMasi9LTJImHmPNS81mrY7A+HQ+ZMMUguwN/c3Hz66ac3NzeI2PfrELr9OCAiSKHArluBa8KJAuv12vqL6qvnAcDpfDsR6fuVzTHXLkcAKAKeEbCqZIdhH0LAQClnAN0EQ86567pxHLMwEgpD4owJQoybzWaz2Tx69Ojjjx+nNIxDypxFJsq0HVZXnuuJbnnegcLiajYLJX7TeTQ2qHNRSzi75kBoDmLbcZsPoZ7VPgdovpPVR8BZ814uw3xjtvE9K+o0LHpulGdLwjy1aW4QpDxpqrDNqj47xdzE7bVegpv8tUGMeux1XgtBZ2Eu9wNc1EQVQswz7WT6jJBcUE4WupEtgQe1cuZsD1Pt0+O5gjHhHJ2i4HdQe13ZVKoFbggx2PyfHre2IlLlkdrkugZGPCGEDExGJDQxYZ0O8shZGDMRIwq8fLzf9hc/l8N1vjp5sN3n9Fc/+vEXfuWrj778xbe++OXPffbNkYef/fTddU9vvf76zbj/X3z9qzfj37zzzjtfe+2ku9PnzBnSNqxIiFOSlJkyScDqg5e6Us1q8txhYZ89gTV48wsnczXLqzhQDXLjQ3+nSVJ7UEnIarQaHsnVzvFv90zq4RcXoAPHYjLXt/zVgHf0+1/msok3D6ofh+bmIh6LQB69bNWWC6F/epZZAm/TnyTpPOHeVJnb4DdfFTpC8rDRPBeOraundkFWJztLF2pKZwmlaI9kyjDzNZi94afj3ztjRie9c60C8oKIiPKYvHhsRJBJPDB3Bk9GIDnHZMMmMEmM43RiCDd8lqdYjn7v1675ZimE0e0d/h7jLFErzFFCSkkkIyKGQgD6744PzDzm0spCk5Eig3ZPhdsPHvwlL5xPxuAhIryFw0zy3zamX010X3pxjc568YuuBmQ53AKUQEtEhZmFGVGKtg2CiKqNPXr0SH3bYd6s+zbg/Z9GTh5CDzNUvmt+WjIjlON5y2dmFgAUWK83n/vc51JKjx8/3u126+2m6zoQHIYBqIO52Jxho17LXaCZxRLmejdA3RlBgFErpqYtjxbY8kPB/4+3P22SJTkSA0FVNXP3iMjMd9SrKlQVUCh0N4A+eU7LkNPkCHdk/8F+oOz+wv02sitcWdkRjsgeM5wmuWyezWY3Gmig7rvekUeEu5up7gc1U1c3j8wqdFPWpfAQGeFubqamqqa3rmMBmjeav
ue/129KwFvXxdrWr9C7ywb0l9qUq7SzJLQbx/Dw4cUAuozg325TKqKUW4tU5xUXAzqllNI8m5RlRSCpJjiphXouUY6dCKZUYBLWdVNtmQaWbWi9LYdqoEqjBC7zl3ajN4eImEJIVOs0xkhEFxcXGoKuCqHOLcYYSfq+Y87quwohABBneHl7C0ABD+Pp7uMPv/osPO9jCCHA/lEIWoMki8g8p21YClHUphR6g1TVxUdXKhxM8DYpNAQkAvWsohODj6dbjRE1WVeXrHM2ad9LkrkmGVoUEiJqiR8EQBHmBJWTUACh4GFuCONRyHg4Vzmn2Yib492YZiEExJnzaZ4AQBVkIuq6QVFImFEgUMdcSsiYtUJdAp6ObLNCCH23AwDOnPLsK/ceDoeui16TVIkXXIpHtfKz/irVbE3UE1GM1HWdmn1Vd53nzJxZGEDmOVNt91KvgIjIylFAJ0sCkFlEAiBBieuwX5FXtV0MpFJbFYRa7MP2cSmJ1hzt/lhtKBzW15ZmPLLmda6UZxxYrch2MzMjCaBOEXNmDXGXnIc46OEkIJyzgBAFCgA19qfZUX2VBwE8eKETtb0YYRK5n60N6BdlN8Oaceg56h1xjXpwdnpmwmHngjD25x/xrNZvQYX5ctx6yPuZeyXWVqqv9nq73WPr3e6+vWuBwzrWWTWAZQTELDlzyYZn5nEcRRAZA3TCctEBn6YXX82v8vHd8F687H75wed/8f4Hf/C7P719/vL9zz8kgavL3T/5h//w/c8+/lf/33/96OLw0x//1kdffZ7mESemHucpHVN+FB6lufBElICIgQJI5k0Olc6/OR5s7Wa58CD1OIBrSQI2pOQ3yGAlToL3BGhfGg5gzU2txk5uXt3MYbuucgOuMBD+K124Ftq2X/pLRAAWDcerwf5ZWBNU84095cEL9Syhe1zuyu63Y3JORmjZVR9l5/nxELalmTnGdm071S0C2E+hFjloqbiqDA3Jn12UPhVcnrMHtX3pX20WR3+z8jFDdUQUM3jJasleJzy7O/ehljcB+NnyepnN/Lek6t/bQJWd99LTjg3pozXF4pp48TbbTya/CmmftorPsoLbt17bfbnvavjI2SXDuRvab6T9pmFoDSJJVSMFgRB9XVAR0U4dAqVSixB8+OGHz58/f++993a73e3dkYhSSlmqwr8OiNX4TYODR2yP+Q3L8vP3j2yB6emunJUCIHB5efnee+/d3t5+8sknUAUgzksk25ZYzmI1bFiZgdT4tkdREaEQ1EjqVgRn1ysivqeRfekJBNdqvIhoCOUWSrhuQnjWLmb3G3pzaV6/FKDesizbLwAAYIBV1FUIQSsuKcWVWC1c+LkpJ3UoTImnMeWcEUKgbs4zZ97t+q7fDf0gIjmlaUyZBTEgILt4JRNUTAHW+E+pjoTMK6bnPyhYYu1E7/m20Y5+cBrjKikJhGJXyo3ESDaBEMLFxYV6CBsnUqAUY4gTdV1QdR0AmQWp2/UD7rGLh7vbG04ZMIbYn+YZQMMdS3mYnARBZsiIiKSz5bqDeZ6Lyq1OV1MIC/e20P2IXb+qs62TQcSU0pxG1SctfiqEoG8xjLIN5RqkY95IQxhmxlgTwQip3pzyJLN0XdeF5ewm0lbvxreXyg4G8MYym3O2IjFElNKS46Z1iUzbt9nGGDUbwkKl1FteaXNFO6rvFUrj8kFJiQJ1Xd/3nVkfSkgt6rmpOjMionAR2Pq+74cYY1RNsuDRqaRuMcM0TYjCki0fFRdfQqFfNdzYikxhCS6cuzkLGghUQ8xyvxdaoqcTr6jo94vhvNK8NxgYdXkq8oxjy0f8OqmGh4mzdYVNY2gbZ3OiK86Jv9O4FREBrBi0P0vsFbQO8GhW4V9kS2iWbMzd5LCG5xKRCPgNWGiGBQI1r7aJ2av99z781b9FSdce8XvU7JdtqMGkUVD9Zfc3ki44JGkmCW6zoEoYyyaqGUPq0giZBUVybX2WEkMVNJl5t+9ub0bOgTn91Z9/9MZ73yMc/ud//r/+h3/z799549l7b7/5mz/84TAM0zT97o9+upPu//ov/tenjy9GeC2PUy/hsN+NkGKkLsWcUiCSLKjFqpFMnvMIbGdA86V+6Lq48IgNyuHmuLXv7U4Dl41sbBocp0OnIEEtjozO6ODf7t9r33hygzXm+73WMfM9Hjmb8BaXGoSpb7lX/WuG3c7qLE76wXETqm3f40Z2adbbjGYjsPO5iWtP2hCRfdOMaYbGs0CGDfD9hP1PFhl79vH7Jn/fbWdn22ycD5cqIvKSjx7Mk1AGwYLhGg/WwB/WeOLf3lS7tjs9G1/jwHllz+73FGTjbDmeJwoPbRdRWfoQGr7lDMycZInNyzlnEEbIIEk4u6Lzbp++k04oGx2jLISWHqB+yVSrrzTr9R/OvqghhO0Ixq8aJK+/1g2C1Vts1Sjsn3r58uXXX3/97rvvHg6HFy9f7Xa7B+a2Xf7DF25ECzkHK/86IlIbUwgBWBCRc766urq8vPzlL3/58uVLLeg3zzNhCCGkvDruG5557rPYmQbOA6Cgs0liUQIrwAlVkUbEDELrYMWihMvyroZRy7qti7H9MiHH7e2z1JRdb8KWKi7zEkxUsFeVAZWPY1RlSWphghVYVm93Gr7eoAqhuFoGzBxjK61SDQpTL9Y4T8AlzF4nHKt+MnNOKU1pLswZAtSOaz4RuOuGvt9pAiRzUs025ywYYU0UCyav48n9Rpc/ASuUVtKsAZYw9kPxZMZafUR/2u12AKsCFoVTcZLMLKWmiyBlkcRAGDMDimDoLi4fU5Vd83QTAuvLhYGzOgkgJcZa15RLK5M556xVZLgWazUNzXxBiGiuY0QMVHKONKQQAFKe1J3lzSUhlD4Wdj4aG1HVEXxYpg/EWDCZ2QVeVnQlRNStDxFDCEKUi4abDEsDqPLfQSBICdWPLcy8KI0K5rJBgXLSUkllkHp8pP3+YrfbEZHpgeqO9t4/dMYUn/EB0Ft7jz52fd/3fa/wgHpeJJ4VYABA2pGhlvnZ7/e7fR9jjDH0fS+SU0pCEzNbCGsIyMIihjlLjQl/3plkaKzYR2gWrrLmnJ6b8TrksCK5oLadOMsK7cUNFz7L69El/nmxmGuwsmciRiH2vRd/Pb8z9LI12CNcA2Rl49a3V7t4mWWezWIda1tA6aGpdIJFV3G5NxuB1RiujS8mX4blNntEqYUcQ9+CmlxIT2N98aPhRmByS2vVaVmH7G6Dde0VtndK6h43/AFgk2lQ6OxVf1ohqNRMF5+QREIIkDVhPAfk4fnnd8LPL4erj64/m964+P2f/vinv/vTD37xy7/4z392dz3+k3/yP/zkhz99+2f/5aMvvnh8dfj69OoC9nmex9Nd318EwGmaEIJWT+cMIYJIKbdj8Pe70yzn7K94Tnvxl6yzgbe77E8awzRPL/prcq1gg0siPTsrrkWlGrD7mWhDP6zREVot7b/i1VClx1W7wdOj1OQfrCFP4OjIw81vlv/mLB9ovrenTA/0aM/MXQye7VjWjecJfsBm+8wr5XOc/G7SOkivIS67HOBa
ANAezDU7tv9/2/AFDkomU9vduzcVSZYc3zDIFCZeIppRRrcjIzp5Q2m/D2/UH5u/3LrYug7YW4YnKpryqlzz7abrc5TzYBbx7q+36/3zOzVhXSnc1lTCkxBS8eEEEIQSvp1+2DjlsR4CYyXcCOeIkmKKWM46ge48Ph8Ph4AICcyziO0zSVfNk7ES1LI9DKmVIrZNisTDUJ0PvHmGdhC35bbQTTCall9eech/N0OByOx2PO1VMkgtM03d7eqjcSIFMQrZ0JoAIDlRltJ4ICUKZSxjwCzWRjY1LmAtXPun02Q3DmSCJih5x1RZfU2aUnQBc+TRM6M2Vu3UeccXkmcBoaixOeRSTzBE1NLaWEUPP0hKXpyZfttsIHtpwYVP9Bdi0WpUa6grYtZCQAEhBCCZGQQosQ5sKsFkzC1mrSRa9QixmOMWrM5+UYtt6V51OZpmkYz6VMMcb9fru/2XZd9+rFC00yVBOAmaseHx9ybbvIiGgBWX3ft+SRAAAC2qSRdrukO19KESnjCIjYp/TJJ598/je/OJ5OIdyIiGb2qdILK5qDTiFf7aY9WZr/T/l+BTthbHegFI62W/qjmjtEVFxByMvpqmdIxMkZRm48/74Yyx3l8oRMrQULhoSr6qDgfu6XSosgkPm3az7nyAoaI79K8f1NmXvqF1wBrzFgu2NvMR+3JzT2ALvE5TbCZQ/bgWy7jsg82xTbeDuf9ro1zP01f+MVB6Y9ptMorfWQwU0/BEp6vEUAVfsHFAa1ZOixBKXmGpeLT05JxNp8zOZge+0ZD69cx/pvEckiEQlDoJimcfqX//L//cc/+xkAxb6bSv7t777cbPq4Te/fPIyiXYwAKCBWc70SK6SIANaRCUDUeB6sTJVHEmbllNwq5VpOBcxlgjViP3Pf/8r4kEdURHSKw0x8NG0ZHJKwsDUWl7lE4pM/DSfZhSAusOWpTXz+8lNaDLI+72s4rIdanNbFHXSMHObsyj9j33oLiJG4maC5IgIWlOuPts3HyCNYwR63hKfWtfhQEbvFnNtXns74+fuhLlt/TSEEZ6SYv1Q89jKzGFhc25ur7wIA9YLWP2kWcu9XZxgF12iO3b88MDeWL2B4FRRGEt3Wsxpx1m/8e13o4hQsapSeXuziw3qx0qxsCwRbguhpNcE7qI1WP7W0p96iFp62wPk88bJwlScUpVWLs7Ic0ky9mgAFAOOQp/F4OJxKEQqTrVRErBCfyrX7/V5FKwXmOI7bXa8lSVRq10hRlcO49RgQERXoY4xQWAAZkbnG9ZkjhZmlhn4gYiAEQWKZPASunj4PLlwJwXCNrMnvwYIXFLgOzs+h93q/hEX1ZPXQxhhDQPXGUEsQUPGaiLab7XmUUso0jaVVBNRx1IC12WxMwTBtpxQXmGe5fylBswh4beT29k7dudpLGQByGXOhlFIXorUgDwERIxERpOLqdemAIKgCA9IMmUVwnKoP8Hw+Pz4cx3E8nQbNaSyFp0lXdokqLO5kqU6oYmcIQZPTTGsCAGa2dLjF9ilIoQVe2qU/VOVhHMfTcVCF8Hg8i8gwDF23mabpxYsXtUoKZKi9uJAZCqCAykRISC6BlIUmxjFgh077Mn3JPIR2pzIgvBTAv/yQ2bQdQ91GahpKN3Xrsl6Y2UltI/RDaHU41wTZyKCIZJ70+caAqIpzhZtAdTGYmt3BpJfGpERTjgubL7S2vkyRigatSIZCRabCWAqAILM2HgVEhNYuJYULiG1i4zj2fa0xI/Kg5qRpmqZpyhMSUUxht7u5v7998eJut9+EEPbbXSn58fHx/fuH4/Go1cWOx2NRa4QYua6hyKybjRjCRXNDDE1PxrFwq8s5brru5cuXm83meHpUsEABQE4pmY/UC8PY6rasL03f8BfV7G5iBmbO0KpghoDaUdHbxQ1pFoqKvRkcifSvsbB1T1WVAYtcIa9FN23Fzp+ZD11Lg/HTEBcdBysnw4L++jUCVN7m4Gifl9NbYD/OyzDQKpTR5gara70cEZFLJTljA17CuGLOWczKflhcYps0oVCcpefK2x0EjBDzvGOE4SIzh4ZwJqrqZVkBXlC+CoTLnJsmbC8KgNxsitwSTvyS/VrqXnTdeDofp2G/243nCZA+/tGnX3/7DTAfTo+f//pXj+dD4fEXv/yb12/f7/ddkB2RxqvQVC4RXGRIUgnoxQcO8xwq5Sg5Z3UWaey4xQqWeYcSP2dv1DFsKasQTUMnWMkZhg+Lb200G8oosjYKt5HtLRrgjnNxh5mD00A8w7ioEL93FqHNeTE3WJ0OXGl6629x3u/EH3lwdMZ7TtxvL1Bao+V6vzwY7Sd6lTI7OAb59eBKKPgatPxGoFOB/NYvnvfvWtABmat/et+n9Hitw0/SkSxcvAtab2VA8mTEj7PePnGF6T08AS9ItdhZxFpN9AqYrmG4+yGuJ7MgeuvLgx2uRmM+/UNjHypbm6NmAQc/1cWmrCG/ftLfR8Sn5uPhswDOU/O3TfHzFBa0yN4FGlS5bZa8HULoYkLElKK9mZCwVi+QFHsuEzN0qc9TOR3f45w7cxElPlPOqeuGYTgNx/3+VhCGaeSjHI9HCikXURxm5hDGEMI4aXE/CLEDDCyQi8RKvWsP9BSi+lJEQDsZEpGmiAsSXLueQMvrIF2cVnTkBebnenHH3/cE2Y+8+HD1QsSu6zabjUWjxVamBQBYIMbILc0hhHBzc3M4HHLOKfUAcD6flVupLrSIJgUnJ5jibQljZinzsuLd3R0RnU6nLqZaDFOKitoldfoWIiilikm6B0QkQqWIhtjYkksWEVZ3ih7k42nSikTHw/l4PDLzNJVxHM3j4crCI8Cl9HpoFUHDpWhNsP5+4KLbrMSOnXEDiP62Kd71vKu78ng8ns/n82k8HA6n02kYJgAYxzHGrkVLYiklUAYAAWK+iMpEERExQEDQbDcRESoYBVvNLXHaYHFVc2Re3A5nYbd1U7hlstiuUazimYUC8ZoJCsCc6SMiNa2YWm1IQ3uPrvZqVF2FL7ys8c2CVeQGWwI0qmIqJbmODja4QaMiHpfCCKhO8pGFRALAxfpTcUkxmdDPljlPE6o+TxSqkJlFbUkhhJcvX+52u7v72/1+v932KYXC0ziOv/vd787n0+Pj4+mkUcGlmVGiSLUaqFNEESbnjHSpRRRirTPXtlUk566L6m8opfR9/+LFi2EYNDI0hKBWhLFc+LhxefvX8/223eAfs91h5lZJX89vIBIiimb4sREBoDyd5ENE4tQnM8TqBod5Mi4AcKtDAI4JGRHxFHCtjtr4Jm/pk0aC4RoxtVWsKfuCWLulzdizf94zxTXbXiwKnBgnc+nQ1vjUlAzLQRhbMxa/3/o0OmFrAYf1rNQlcWkgpZKWMBIp2dWuU/oVsoBwEea5X8gr5+vzb8ZI23e9Yuuj6h8Wkae4GzYLtC1KKwX3fbUpmjlT5qKwHYM6B5HTOHzw4uXDw7ELsU/deRwg0G63O50fP/z4o5/+0U//4t/+63OeBKFALeJLhMDILIgkBHIJHSkK89AIVR5HIjI1CZtCuNvtt
tttzvn7779/eHgA5S45AyW/TEeJlqqLZ8BrMcKWyS3xoz1G/mFDg+Cq7BoisfACqTyBMFWB566Gq+g627j/vGt9TtcHZ8Fv1iM886TMBbXFyOvj48kaNqlX6y6sAeJxfvEWI5Lger1QuJRkWC/cQ2Bx0BbrtVPg327WKOOv1K7s2uH4hYRV+S59zMo8VoEAZ4151lMyWzIi6lMVUcslTNqMHfbGORFGEES8hDV7FPVrt33BZvNaX3bfNmhNe/9zLnSawNqC6ee/eH4xSOHit8PmvMBe+D0O2lX0fgrH1vvoDsITCmHjOPa8olZoTZnbn0AIYx71sc1mk1IPQCA4DNOrfq9CnnZ8VcI+DhO3iMcQQtd1t7e3+/1eRE6nk4J3GAbtHgEtYgUAVHLa7+uY0zTFpBXOLwdERwbsbBdEBBgEBIR8EjJcQ7mrCL8mgzA/s4vzu8aK9QbV+4QLIF/dPrvMVQWOHwGAtm0QwpQSQxUfc85aUNF2rWW7FZVZAaDrur7vVTIehiFwVOeDauNa3/V0Oq3xSu9ode5hGGiDGkot0By5XRGRWgwWUbe7i1tqiScqCiNijNiKSbIpP4UnERlGHsdxGIbD40kbkDCD+nIaTaim2Jq93yIgYusSEWMMgZg5pdB1XYizosqmfpjkaTzR6Ce5S+d2Op2Ox+PpOJxOJ61+lFIvzeWl9g6q1VwZAIQFUdR/qyOKCAKwJtRJBi4CBUnQ5UCycwbaB7v0mRQvGq9ZpbNj7np8VK/zdMYoKpjCUC6NE4xGBQprTuqFCnLBJmw5Ml2Q2lULwdFk/6RfCLjIeW5d3UMI6Iqb6L81bhYwxI0SLkIKoZuKIGo1HX2jvr8ELBokZdNWrnQ8HrVpRCkFIWw2m5cvX97c3HRpv9vtNtseAIZheP/+dDg+nE6nh3fvx3GYpknRYaE2p5S2222KvTT5WX3vFCClGEKwnHxFy67rhHCz2Ww2m2E8KRBUpDyeHnPOFCIIahEL20rbHQBQpPWgaztbKzkvTus0TRS7EAIAlZYjhohxHE5d16U+lUs2p7BwK3jrhTNhkAKXDifs2lvVD/MGUwAggl4OMbxBRJFiVAVdGz0iQDTlsCJ9ShWDq45Z8xk8rqKZYbwovABTKVPLVyZE0OrD5OwQJsCJJsDQDGsN1gYBkzYqpYBAFGyZwoKA6OIe7SRU4gKIosV/XAEl0lw1ZAZmAaC6HchsrOiSpYOIqLqC3geA3MIwilMGGUHNDIzY95uF6aV2nSqXWlIehkpZ7GFslicKAihcszFAe/iJSOGhtagr2LLDUWrAiRqkQst2CCExQAEWECRMKYbQSU25LSGQCE/TmPMEoCqlhoFVxua5Zpjyfb/Lh3ETegKEwoQSIg7D2/vbzd/+4i/udvA/+Nkf/pv/z7+664IcALY9Ao3TBGMOIRCCcE7CIQ+p75jpfD4j4ma3U8to6kSLXQXE/X7/8uVLTQ+4f/ny8fHxq2++645dV/qpcC5lChRliTmegxqQed5myiCPTc32RND/KfPx280LEZfWYlg3wCs29jw0G3Bo3SbsGRY10rIgY4Cg+y4AtTC2nXQSAQYMNIsr9tMm52y0JWihsMWlQyIiNJ+x3WwxvCiuj2jhIhVvm8NaoOWDxvon1BgVABAEy0hxMxEhrKq+2oOtGEPOgRBAhIuXg2ieW+IXy40F+d0MITALAlipr2rEa8tDnU/7t51mEeG2p7pqjQ+pleX08IoIs9lZgUgxRwM0oOuSWpZFWJmQzjC2moSe+wKAGXRyHgEohg4RSy5I1UsfU+MCpSDiNNVGt4Y5ugcxEei5RqIArKUeNJJTGLiWKlEKEpGYl2gzA7e7AFFao3BjN57z2c3ZVyRqj/dEeI2Bl0Hcl/5JBoldEjWyxJC5DNMYY8TJXPFk3OeZVwQNr2UGX+hr9bjWUELES9m9FTfxC+dL5kU9X40+WB/UtUAPABDiBgBYRKCd1gIAov4CBJBcmDlRQAEpUwzbGDeIOE6nzZZSZCkjBULkSBS0nD0WkQzAfULADQAw8SakyqFauk7sO0Tc9N0Hr16qTkJ4N9xsEF8hECLe3u5NFA4h5FwRGLyTqvCm64sAskgu01QAaLvdjlmYy263OU0ZGQsWRCwCkcJCEjWUMJAaIpELDAEnT9uTODfQuLPg4XwhgBfKY25GlsXTFd9a1F+uuVXVwxND6Tu6v9ttukhE236Tc95t7xCxFMnalJ2ZKTJEoUAANze7EPB4PJYyqY/CQm+UU6iGqX3hpAnrMSYAGMexTFntxinGPE0l5y4lkMJl6rruw1cfDacxxjhNBTFobVIRSLHPwkOeIFCkiCqPYGAoIlymzAObcfM8VlWQS43G1B70AHB4OIqI3uEakC95ysKMGIWBpUpxIdR4167rYiJAzqWQkLK4lBKiWFyb6U6JAglo4itoJoUgFNmkPsbYp36z2fR9X8uTImWezsfT4eFxGIY8jXkaxumcy7jd9aXwze2m6+Hm/kXsYSxHCoRxy2YcJIwxIIrKtMNpEqnmdfULBETVYBFxGIZpHFqIJsUYVRdqKIQiSBQYMDMQQAgEgiBIFDabVIpSJJXiRCa11AM3+RYpVJy2cDBCRIyoCoMeENTav2ZfkHk3C5qHtir50Uo/jaMxM4Nw6ir7SKlXZel8zkTl5mbHzIfDgblQTOdpGIZzTPF8PlGQ4/vHaTxHxHE4AZdNSgwAXRJGdQwSpcIwTGUcM9XySDGAgAi25rpaSlbdrIJRi7CiIDJtNpvd/c1ut9vv99vtPqVEvYzj4fHNm1LKMI3HY60WM3IuCBKCAIRAfd9FCtgcGCHU/pxWrz7nrEvWP5tfmgkjBSilUCBGFih3d7cx0fHEGOjh8BgCbre70+kUQ8eZQSBnQTSPF2o9oEBe8pLqWhcJ6ns2P3AB/a1W4QIWIqKgZE0IIVponxdcFpTLPgAAUTSW4zmuf9iTQpmn59rzIZiCB0aFcW5F9nQWnr2eYroLmQBanxMjxKW1H9XpLeSJ9c/tKvMK4xeDtFyXSPxoa07jwY6IFGaSzXwQWEAY52UnFm+0B6QpHjqyUmqLKbXJhOC0WfdBQ7+8WovVBLW0fcoTDpmrwDRmrMEqHgN1O07jscZetp2Cyoln9n4Tx0teJviBCIiklKZp+uf//J//8c9++vnffP7ixZ4oDufp/fmcUkq7HTNrZEsIqNS2CKulB1qSgIjc3G622+3t/ma/393c3Ni3n//N357H4XQcch6Jaq7YBkMel/NZAGSBt9qyqbE6XvzKLpxLtAtxsOlRMxsegAp0F9/X+u2LrYSVMQ8cEs6fvOL9WO/4MwfqmTnMhoUlvvk3wvxwydzDvxp8mefDLBrGvxjZIPAM3OxctKFqiJHfoGeWf5WWggtRBmeuXqzXDoU/ep4NSGPbdsciwL35zEPSKHxwDSpDCE5sqIdRJRg1NBg+wpymLda4poS2I7ZdCwh748XzY8LT7GD9Q24C0PMPr1+9OLwLrFiQPnhi65/hdP5X68HFWTEMXf2/Nr7pMLBC0avX1TOl3/ibdqmYTkQp
qaRYVBcO1upaNHYPQdOlAOxcGBMpICTxgmOuYuF2u63LbwqqXn3f55xLqfyCWll8Zi7CpYi6uJ2pd9ZHvj7JlwC99QFfIeeMtS0Y3FPX+tv1sE89uf6V3+4GDVDHglV51aN9Pp9NIQQAElBX1abfIuJms0HEGKM2r2dmzSHUfbGUS1W27V3+XFt0tH4Yhknj6yyrrS1n1jZJmqSkN0VkmqbmqBEDss4n59zaKmRN6GLm4TyKSAsDVmhAHQfFC1GISvRAu5IbDBUf+r7HFhfWEBPgkrJRF6u2s5RS3/fqyVGFEJpI8Pj+QWvb6LpSSnvaa09I83jbYTHdycNTmruimgsds9CvdOFaA5YbwRJ3eSSxQ2QpjgZ8s5+uUVeq2l/YOZ0AQKSQzGivVH/dZebSvMfgwm7NwmKr858Nh3Vd+qTaJnQocn3LmHmaSgjBqk/Ye6WaqgnAHFgCtXFjFaIShdinGEItYlTZYnWLadIcAN7e3qbY932/2exU4SciADqdHmsyYc7ncTifz1rfBVrbhhBCCiGlpK3LYtRgslbSrMXKppQEiqnWgEFjvJk5RN0sSqkWqJ+mooeaiErJ6JRtmjcSM1S3KO7FzmqOtOcdbe98scmi3V+YOS48V4ZPOc8ati5YV90Mlx9lWCVyodFEVFbRm9jcBQt2tSAcCxT3k5k/dj0UB5xIKk4+MIeSOJ3WyNNimU9dHlY2WzsSV6fqrYkeob3MZJDRuOE53TdaOUs38v+alGZHzs+TXC8EaSkBfuZ6+aPr12Ijg8ss92f76r/rjZMrLE1KK1Jne8GtVu84jTYIEbV6FRDCLDGJfUpeFV5YhASqQlhK+dEnH/9v/lf/6+++//Zf/ff/apqm0+mQYieSpmmiUhhNq+RSyn6/V0KfUogxdl169erVhx9+uOlTC9GRcRy/++ab77///t27d4LILICBAqUYp2nIOaPrCm2g9jjmUVeaeGR7tEAzv+/uPi5GEKcQLlALiaR5Ihf7viDxHm9hfn79ctbX+qQvbq5R7gfHWazEP+PPrMyb5Tz1OkRcxIldBgTAef9AA5QxKpgHfq/HXx8Bc76VUlLq1iDCeVuLBRz8CIvxPYSlqXmLSZoNyNtZjHSL8yZ5rJAmupkZCBppMusSrhql+kXZ29fwwVbnGp5AkgVlgzl3WEMb1lu5QoA1EhrpYBc6tbiu3l8s2S/BWMn636vjP/UidCqHB8LVca4CBOqJmNGTq48tFneVv/C8zyo2Fl9KVrzSeiHabQLU8MqXShIWIoVW1ZOq+AaIAS5xy1bv0bMYN5/LBFKMMYhZuIiQADmP6lbKOQuSKkg5F8FLhAtcToG0mmdXlED7c2GYW2zN1cvzrAWZsoMDbUOf4giLAe0rf5BDSBqiwq0ZwzAMRHEYBm2MriFFhKRK1HazU8Ugxtj3/TAMupVWFM0gX1znZCMmqh11XadFPvTOMAzTNN3d3Xz00Ucw1/pCuGj42AiOhq8TETOUPCGiSI0XlVbvtJQyDtlae2veYM65TAytTI6iNzyBzLpRqQuhFREFACVf+lXdX6wyZP2zcWH9k4j6fmONLi3ncJqmw+FwOBw4F1XVqkMMQSWZGiJI1CJa0IC52E1VaBuULqDWhZcWaWLxa6WUaSwiolpKe7LhJyA6hdDjoUX22XxEBJxbCVzeQaWKzOAQQETKnJLoIBYI41VBwwRjKJ4KoTMXGj9S/bD2m2nFjURkGseUorToAIFSDQ0kPDGjNvcyxs0E1HVRRFIKfZ92u12XNKAd+n7bmCkAAKEiBr548YIwal4fUWRmxepTPirjnqZpmEY1QyhAYoxdTDHGFEKk0OJuFPOr+VXbTujqtOaRPmCXQiPUON+L9NIaWsRxtFhohBaUZJBsiFTTRP3O6ocyjWt61Sib5wgMEBAxetbop+iJo2cDC8eVJ5EwJ6webzwetHGWHjCTP3juNjSkWZBgm/lVqmoPr5fj4eJnu75v4/v74iSnBblHxMK8GGGxXhvEdtT/WeHwpHvwigJsFNzPCh3eeDj4ua3JqEFpMVt0rsUFqpSyTGzVr7xiCU/oyXO8miUNawBtKYWAantkRBQEFhBBgRRaVaUaEweCKvWioADVbGy7hmE8HA7ffPPNL3/1d7vd7p/8s//JX/37v/rmm29u7l8qtYWac1zJyuPjY79JH7786IMPP9xut0jS9/1utwtI79+//+q3X755+/p8PtsOxhgFQSnKCSbgM+dC1DIZHMQWJ3OBq8ZaDIweQ1ZIscQ0fygW9yuw5wdhjfY2gvEA+8ruLGLasOmlhHgVddfvWrz073Ut5ukXuFiynpf27ex1NucZjbp2LtBd6wkbAbQHpKkZNkO7U0rxxcD8tBfz9wjgVTK/HTb4mgCSU2vRuQr9CPqtWawXA3Lzm1FLtACAlFLJmVuJcAAQxlyySniGyHJhVDOAI17Aa3KDp8z6xALs/s/FVPV/yzuX/Z1dlbIJe0BdHXn9K5jjyfqNBiK/s/6zjbYAy1M3F9d6l6+eJnCIZH+u8fmpt8DygFx5kcd2aqlKIqKZZswFmJkphGTRqvqoziQLR9Qy55dcZW9n1JFNSze1pEFJzyipRRwRoTaHrbUuGAiAEYJIdcjoCEikKF1YWEhEWdLM0bfYFPuXXOLJM6D2+7hA7MXzTxHzH9wXP6yOcH9/f3d3V0o5Ho993wek4/Go9QP1X4UX5zwMg4ik2Gn5UK8mxRi3262WSWSuIeWeg/ulERFhFT1D81eklG5u7vb722E4rVEltEokGsCmP1GJn2vsYg1b5VY5lrCGRFblUMvSlMK5HihuufQKja7rEFHrQ8KS7NRN1NxFU28Ra5jRgnL6JJQYY993Xderu0YVQgBQ7TTnzPlSol97lNljuvDcMBmbZOXRhtnb5cUDp2pEkyrMlwoCCpCaV3mpYH8JOLcPHvEWhhU7UKVF2AKL/6oaKGXmElhQbK/7mTJvHASbE0I/27pkzjHtkALANE0ipes6TVU9j0POo75lmiYp6qmrnmEMQlo5loGRkQoAggBhpEjENb7g5ma/3W67WLG96zZNJ285z5QQcbPZMrOwMr5pHMfT6TQMQ+igtDrSChPb3K7rNl3fxQgABHUhVEP6a2MSLcJfo9vIsqVEQNP2qsFCf+g3aBxGPaHThOaNZxfMuMAfSxz1OEYrhyHATISQmdTBqArhwmbALRJgfbABAPGis3m0s5/7Fz8ViiPOmO11KlNzF3x0wenFCRziLj8Zf9Nf9sBidc/T6PUBW6/oqd/6Z3Albl7VssxM65fp7ywGkWdLJiw2wtOL4loOeqAZ0ixeauMYC9dv/OsM1Twd9N+i45cm7/Lcg2pDrokpODXJQewCUWjqCoGRMw6BXrx48cEHH7x5+/qXv/zlb7748t3rd8xyOp0tpWocRwpwc3Oz2e1evLy7u7t7+fJeraHvHt59883Xh8Ph8PZBRChgCnG/3UET9BERUIiAWjkcNRqNE/tJ2toX4LIHLJmeVt7X3/PyILK9qP82g8Ly/opk4EpItd0hIpkrfdiUK8BLD1y4gtV
XhEt4okjpAm0W49jnq3aip367GF/EPhi2L19tMCTXCdOPYxbfBeVVjPKWPAWd2XRtIVdft7hsKC+veBuN3jHxl5r4yy0jqLhYIAvX5+b2XyOYN8yB03JZmjW31Im5XZgJxJ4mr/9dQKatnYpZf35IM7GZXbk5n/b6WuzXYrFP/eSpI2xMk5lxHk294Fxw7aDZfIw2whPnBa6dKb3oWr4iXjO8+rdcXWYdfPWuBXLahGMkTSgQweaKCATIEUOlNoAkIoSYQQCx1jrWJtVtMjWUQ5AFQZDVwMetQKK3TwGIeT+4no1LZBQixhhqxEeoJw5DVP5VWARU+lWV4AL5BVSvnsTFviweXgP2+T//vtdiC4joxYsXfd8fj8fHd4/b7VarqgLUaiv6GxEpPOWch2HIU9nv97e3t33fG0hDKxyKraG8CgYmqYtzSnBLD7P40hDC3d3d7e2tsULDMZFKiLSLdwhBS4bqOOM4lrEols666mURGVTz4ZrKUcVxgXamCAVQQBCAQmucy7UML2o9zMt2sEjR3txWf1KglswVEURpzC1ZaL2qdr1qhH1PrZKHxtmeTqdxHKdhtPmQECLqw6L1QlJS16f5vVtovU5DDJ203oyRTW7VdErrZ4PNSqLQa8mZFZ0sDTJ2tScnrM7vghbpNErWTD9mZu3erl8xM3ItPep1UWnnkV3Vd3IpS8aSPGc0pGXnGLc/tWMNVxNkTQyOMY6PDzmPlsShqppYyEyTc4oUkQKFgIoAUEgxhjxxt+lub/d3Nzd934eIagcBqUXdYoyaZapOQkSapim3YqHTVHR1koVbmDFwddypPaVP3WazSZqNX7ixZgV7ddIGSsou1XkAVsUNlF9rieCu6zp1WyomsORpHA3fKnvV2F0pSptbE+xgRgG/UwZ/kwMXhKvJA87Kjygil0Lz+jPzTvi+FoZD0HIIF0zlKg2tv/09LI4eRw2x/CByLbSyDYVX5+lVVmUnOuG14uR/bv96sKyft195dU4/k/MIreGzWBe4XEQ/SZOq7SyhKfR0WR3M+ZBNbG1IsDWiU8IXkGzzn8U0wmXfLwKHI/eXPicyF25sH713wi/HpodzkdTNFEMIuZA9hQiItUnuekD9N/OIAs6pCFKbD+OvfvWrX/7yl/f39w8PDw8PD9oq9DgMzNz3nSYE7m+2t7e3223/yY8/zXl8+/btr371y4eHBxHJZRyGYZd2hVsBtCmHEFKI235zHgdteVpKKVMmWJdX/WE/ADSHjFFAQy07j8/81nZNXNy/f7WIPPfua0MtuoHb/evvnauX/r0LUPw+r/Z/rhfuD+zzN9toM8PKYjlPEaX1Hbmm5CzWCA7DxVWcijHmvLSRXZ2PjdnYZH2vCStGeRYA95PxwLe32GTAkZH1krHZ9e1X+jlQEs4lM+Nks4oxAsyO8BogdomI7YWfZyN2V+jteoP811fn//xJ8ZRKLqrOD6Dl+gjbn0Y/4zyGzdPSBeY/hXJPLfYJrF4y3/VbjEguJvPMSkVEYHmUFEQesbGZvfJ0rGIrhIAQQ+JSEAFiCAIAIAQgJUSUIlrhEIFCo0Zrw4ThZ0CJBBGJNFnAFohQDRBNl1TkmdRJ0ErvIIRhGrNwgmoYZQFAEmHAQAQ4z6leUksHtzVNuHr0rsL2mX1/5uZiBNtKoyqhNVochkElzhSiJsD3fY/NvgUA1S0B8O7dOzO7q1Kt2fvTNGnm2zRNj4+P2pqi6zqierTFCQ+IiEAh1IoDKSXlnpZepfNsxl7Ut7SQRUBAEfV05TLVxuU1IjTXGBn1ZwKARpNaqF6bycWRAEvDBzVfjfp/0LLpANQYQSEEJJO4xEhre55S0raWndWPQaxawfl8VleqtiMHzfoLZAzXRHBstNrjc3F9rf38p2nAuRQn81px0vxUpZSc2eCD7dL3dt3GQd7Lumqv0ZNCWgSuFCZAaXHenhrzpK6n4gUSaalG1AoFqyXIgfeS/uc5lF0X4CAq1qlmElo/D03YMe5TSkEK7QT5LAMUAeZSXycCyAABhSNBH+Pt7Wa73d3c3Ox2uxACgIQQNikV0UiWipAApMzoeDzkXMZxnMaaqFmKAIAaSlhqI4rNbqv96ImoT13XdVEF48KImChogEQ1ERBBk5dTSlMtghWICEl1y4iQ+r7r+x6Ap2mYpnGaxsJTCqQzzDlnLeNMIYTA+eI4oeaVMeNvO31azAtEwDse13QGEdqT9Znoc5Zs/xaCBQBcvePRek1G9QG+xkE9qfVG7rDq43R1Gf6Sa/Kl59zYIsIXQ9mR8wR9zbbXk/GvkDm7FZFFepL96202i+U8/8bFV9Qqbss8xYJdjp/t0WLJftU2mr3C4wCseBjOBURp1o4QZgq5QQPmmOC/pVp/WdYujgsq1uKpqEZKpYzKwIgoxmb3da+rQAMQZASNYCNEJkZlIA8P0xdffPGn/8M/ub29/YM/+IPz4YyIH9x/EkLYbDZ3d3e3L276vmfO0zT91b//dznn94eHx8dHCnB7e7vd3fZ9z0MJVNMImfUMD8o7gRAEx1wQMSARcJkmpATXjtUCMnY/usashpyepMI1LF38+SRKI5pDbgG39RFrZHvmN37+JPIPyTrP/Hz99h98/iqey+8hxrXfPDnbNQ1dHA1veFtAxnAS5yHlhvzrTcQWouNn7imVjUYtgHONOdCIgJ3u0ApEWYKinXRva1icQbvIBYsanfHkDtvVqp7KehBvnBKxLZsV4XSgAz9Pe8tio2eovr7zBObU+cOMYvw+WL0efP3VYjsMN55hKOvrmRetb66ftIXMZ3J55qk98pchz6K/6AJENlRMFGOcxuqyExYWyDlzVvQgQRSozXsUJT3Q0FVR1kQ4bgHJ6oLAcClAbwvEpoEbGl/MqajMBbkIxRRjzBPnnGPSYg4CgEiAbA3BZwtc7Nf6aIPDyWfA+BQ1Xp+1q7v2zLDoBA91rejpVg2wlKL1A4moJrUhQvNLEFGeyvl8fvPmzfl83m632+2WajHG5N01IrMyFepI1XJr6u8Srf9B9XmtuZJzjtE1uyPSpCzNJMTWAsR8XOM4ArMwaosLK3RXnWPlQop1ySpqiwji0uTtdpCR8KKlBAvgFGl9IFIXpWrUdk6lRu4JqQtIZ951GwuLLaUq3lbPhogyZwSI81qMpZTU97ZkC4UtpWhsJ8wPIxFo1P0aN7abjfpIFT6qG+eJtRiPWfda2OEF7BZqd5V4SgvZNQeEg7CQQK59a+uTFUrtgOge+XPnuZs4fzI0NdjOlz1pLIBdWGkIpIBS+E/TANVrIqXWOsoo6kjkUoAiAkAAxBBAQAgJSoj44u4mxX7bdX1TVikABYihbxRGz47aWzVbFRAxJuJy2Uqh2qZFELuu2263/bYGnaotjAA4Ny8a0lgmAO1yMQNC13VagV/bTgiUEELXRW0rWErJeRyGU5mGUjJLTttN39dAZUMqTzCpOQDpEtB+uQMuz9aTI/+BWsA/XIR/iT4qwD/q989v/DRdHJT2K3Zd4xcEml0dy/m5vfhA0F12JPyfz1NJf7TWb/GYCmCIvrSY0j
xL5+rlx1+AaEGeFlReWrE++8ozicUqRISlqGKA8zil9eq8YIorro/z5HWAy/G2nxvZguauxDmfs9GMxPhyw0RLJXMBK5yzxgXmiDNGzP5sxqSIVMrEpQASBQhEGk8UoIp3KrdcJkAMUCuQYitxhoKZ+YMPNooAn3zyyU8+6z///Jc3N3fd7QebzWa73VKA8/n8u+9+8/b9u9Pp8O7du5hCSulmt7m5uUmbdD6fx/F829/Wc54L8wTMfUpd12WGGKMAAhB3zNyL4HlV3F1WYoQHiO6I7gK7MpULvWKOqNflkgUhcO+a6Yr2rw8qnp1HuQzoCNxaWl198cSUFtfVyS+ef+pIPnXqf5+LmTVofjGNhYUILmf5AnPj/eg84bZ95PpWiYuwKq1zmhvz8mq/p36LvTHLNLRpmpRVLCZpnLuUoiEr0uzKnkyZEmhLAGdxNLpBzouo0oaKASJIFI1uGA1fQHJBAHXFBszF5O3DGh/m2D671g8/g072wIIKeZJ49VocpTXvgJV90P/k6nlfLMcPvnjdAiZPzfMpKKGVUnSTvArnNk4zQs2ry8jsmQv2tgpb5gOBPOWszZRRkYRFQKBoJD0QSqs9r9OSlioSWrlaAowUBKlMOYTO8tn0jU2S1Goxs47V0Prv5SKCoEcAXHyUNOcS1tRz8VYhg5Kda79YavE7+EPa4Bz+M3wDZ3hdEK7nBzTUsikpd1A3Qtd1idI0TXmcuq6bJnUaMDNrKRcIlTRpddb3798fDgdtIq8ulN1u9/j4OI5jjHG32xHR4XDQCpCmxIbWKQRb3UEESmlW/nfNzkJNVoyqR/jiKDlnABLhictY8pinpj4BhSiSSymhFmwEzvVn0kzh0nxo4GoWNOwAJEESliJC2JIeKWBMoes6bfAQQjAPYVWiRFKKXeoVttrtLGfRBoOHw1HbXZRSShYuMI5jaLUrTesGJ1ZZ+tlU8jRNXUyGV45ykolehhsKSS38oxVNFXTTNI1DNiVQwZtSb0HU6OQ6adG/hr3+7aUU5tFooAAzM7KUS6hhs8e1lmYtVZix+ZaN36FLWvPGGhUXi6suIyJaPMZOhKHEzc1Nrh3etXZRLFPOOTNPZRprz8waiiQa8wcASBARJCAXIUSUXKZRCgOyVn9Q9TLGmLq+1F4jBECWoaOWEaKWsNbaRaRNa9UuQkQpJSBkZg1kpbacUgoJZORWJwsbVC+7qZvV9x0iTnkopYwjp7jVPT2fj8NwQikhEAViZi371Pd9IDJXagrVxLDY667rLKoutASo4qpRAIDGadgGiRMm7ZhHI6n1HLUi4wvWcvlB7Lh1IDHmh3PBSH+idByadLsgFt42YGpucWnlHntsDv6oQC0ZCh7XDTu9AlYuqbcXquo5t4houzlD6AXjNDkPnhC17eZVvQhdo2e7vFipI1ygSnXaZhUwnmSlvfS35VLfGdbwNFLuuamBl1pQ+2Wz2nzIBRvolIqrUy9y6UVpQqrtjg1lJNu/V2diPN4KW9us9Cdc2haUstvtFDeaQ/KiYE951Ptd15HgOI6iIXkUQ0zAwMzIHAhV1Jim6Xg8ImLf9/f39zc3d4eRT6fT97/99v3796fToZSiRPDmdt8iTWiahpzHUkqikHPebreJwvl8BmSt3z1NU0BNEqgnLYUwBSRAitE2wqPuQtqwrzQUBOeqvuZpyFzma7u8FD3twPvXGWBjuMzHH5w1bstcyUdXAbXwleqRhoSLk+t3dnFkmNkqY/kT5+W/9akpLXOd5pkSZhtbmJnsAVtm+3fm5VOkQ8AmKFwqW4QQSskGMTs4isBGc7iV5DZCyq1zAzo9zeODHfPNZrOA+WLyiOgrA3t6YrtArf2DztBoe9d1+ryhhJm9FdF0VMUlPWtEFKMa2pP7FlWXW+CG3+vFnWAdkJgBWgm7wp64+U00krXAZxOYjCDrZ87FP2Zv95Dx/0rrnOllyqu/1Q+L1InFuwxjF/NHJ3uZRg1zUrwYwZbm1/vUtR7HfrsiKUpEZzZfcAWi7PkGlugH9KBgZrUPBVfAIMY4TbVf/Lt37968/vZmu4mqLSAGjBiACIggRCICBlGa2XWdSC1loe4RRAQWKQyR1CXS971k0dNxOp1yzvv9vhQVP+Dh4WG/3/d9fz7XfJsQwlggpn4YD8MwEAVBOh6PMXQaWhlCKIKlFKIgrIm+4PHK1rugbOIs9B5pPUkRZ9y0x7jFelxFsAWiLv5c/MT+tRqM2rpaBccylhij1lpTYlLKVErp+r7rutLsgMxF0+E05O+bb77ZbrcfffTRdrtVuqSup5TS3d3dNE058+l00rWbZM/MqqWcz2edhpFl/TkiWu0Nw7HTaVCsyFkbzbHm55t7sBQxPVaVDUTkIqXJRTFGqGapYi8y6qpIZf2rqpSMLWQ0VLoXYySClDZEYEoUBaiJglJFl5RSDJ2udBim8/msVUbMjZknVlAbJakiSkraQYGIVChPfUcxlNo88+IJLK6TlrTiq8E12QohjONYY1MBSikaGKw5irpScJLkZrMpIIuaQOiUBL1jOClNDq/3hZEvkoAyNGrYruhBzfUqzUetW2+KonE3fwpM0jAVRUFhKO0XTi3jTl808hQCpdSfyyTARBSDmp4xhMggfZ9KKXnKzDnGDpnHYXh8eNd32xAwV4VdtceCodNWE6WUUkYR6LpIFIm0PHwMl2qFKCIYa36miEy6XwwBZ7pA1Q6q8R99sR+i2mdVVbuu64I2/UNNmp3G4TBOur9MBJGAmQGrIWC326WUzict1IQAoDV1AASxtoJ0VKISHOOVXdchuj7D8SLPHA4HIkLShq2CJISAiNE8hrZnOvpahLX7/is7ikbgTFwzcUTmLNYzIRtzfXNxeVrpKa88kRVFKw+ezRZWVnm5lmhh4yzYADaJZDFh/ZPm/fHsWg++4Dro9NVnbIULxcDgZq4k+7G90X/wq8C5rFxHm8958a71zOxU07zLeXCN7MVJXV7VtE00M61/vz4fG8URKSIFsRopHx7fq0tdRHIex/GMiDESUixZpIgAI1JExEQBabvZx0R/93d/p34/nfmvf/3r798+1DCMnBlU54wh0mazQRQgJKLUflJK4RGS9jKWalvSvHMiOp7Hvt989NEH7x4ef/WrLx6Px/1+f72ZQFvsGuHX0rZBY7F32OT+qxjjz8sF8ljbW6+38urRE5E1OsuzjkDDQLzmM1lfpfW3XCxzjQngcHg9/6chIAsc049rmKFmqfLyVPpTPwNmE85smUZCbfv8hBvVuqjW4rj1OkcUKxsuuKISi/X6zyZIeQLlkecq0BbfLuxfC6rQpjGj7dyck3PgXy+7T2TKLfjn/TPrqfqN8NTP/8Tj3mKBfjSZX3DtegZpr87w6lj4rHVjMUN5Oj7W/l1ThquXA8vMDuIPjgfd4tXomFF9RhHeQqGonoJxPBPRZrPZdn3JMpzHRAE3SBhBk34mAWTEEgphgLu7u1qfVsQqNBLRtt8cj0ctdzkMA4ps+z7GOKEw89uHh77vUwincSSi8zQi4uu37w+nQR1cWv+2nM8U+lIKs
4SQWFBEKEVVPi9nAbSY7cwS9wwk7ag+s4nrU7mA5GJDYY7zVwdfDysuUFYVHk1yizGSkIhQLVYRRaTve2YGDUgTAIBSSt9tTIhX+5EqLR988EGM8fb2Vusrqn08hDBNxUSgNqvqyH3//v0wDNoocjiPhGG725QyIKoJ4OLCAgDVatSIph9UrxvHsRSZptJcNXV10zRxUbni0keHiEoBQqQYtZEJNjepp6V64vRzt+1Nh8EW9RpCAKhVQLRLIbWEFARR9QAApjxMY9HWc+fzoLqrirUlV5RQYNoVUlJ/lErqpZTMhTjYPo65Fu8RvshFnlybYbEGCuV8Pp9V/babune2I6EFiyJiatogOMu+Z16e4BCRxmRWoxWg2cuqUdXRWI+K9jobfyHDG87bAbejZAqCx3NqaZ/aD1NrxmoFF+Gs8a3MGQC6PvYxTHnMXFJKdTBA0mYbgZBESinjdGIuPJ3P5/7Yb7fbzW7bdd15fK++x0DJpA6REmOnTcJKyZVHRwohFmGa251tgbbpKICqTItA3UqoUELXChLV4DKez+dhPOWcmfM4cOFJLeN9n7b95uZmv9tvNl1Svz02B0/JgoiRklyECm3SOJVS+n7LfGkpTNhihvES6YBASpJVal0QJb2iKRhGbvRzceWePQ9ehKXat6Vc0k/tVDMzXqt3tECvNV9fU8YFGcVL7kHwgy8GWWOe/uWXrJdDjtnGL5bvOasf/BmWv5jY1Z94toGIVqnleV7lr6vw9K9bc771pqwXcvUZ/9njCTQUEhGLB1hci8cWdgeDg0IAERMxIhSs4mfJtbDVtt/kPB7GAVETVRAAESBSEJwKIBHt+s12u91t+74P03gaxtNvf/tbCvj111//F//4v3r16sP/9r/9P6vVN0RMXVd5WNKC0ea9yVLYCqBF6CDEmEj9lrk1rL+7u/vo9n6z2eTCp2H84IMP+u1WRPL4AwLcQlzwcrzfRw9Aj95elXJbsxT7HHiXwre9d/GKulNwrQiKzN4I7lBje+b3PBFGcBaWl8X8/Ro9tjyD0v5XSyDU9c7aIiGigNgsFq/Da+KyLYFXkQ688qA2qlhFJZut/QquHTFTJ545yOvFSrMxe/CunwQAXrVC1j9NubWb61fZTMGd68X480EacF0HqjlsGcMVWwDOw1nBsarLJK6J3U8BSmYmxRnOLGjm1Z8/c/lp4EqtWl9XEdhPwPOFxQKfP2J+N/0g/kWLaSCi4oPIJRkeqwgYmFnmCpLK4tBcfNq2m5kPh1PAWgqCCFMXUr/Zbqv3hgsEopKLCDBDpBBTGseMGDZpw8yP58cY43azOR4Ooe9T6qbpqC6O82nc7XYxdF9//fWXX34JAJvNTmeiQnO/25fC+/3+eDrF2J1Opxo56X13LpN0fS4WZ1Bk5rR/ZvvWPHcBf78Rzhi6fPtT+yVNUufmc+77frvd1pb0oIVNUESIYs45hMTMU86lFMRqZ9S6L9gqyph3bpomrRSaUtKbyme9h9xCFqepPDw8HA4HDe3jlvjH5VKziugSsiuuKuY4jlq+kRlKyefz6PXAumTBPF1i0NqRRwDCCDSrosmx67q+N9dHbNVrKKCCCKu+cVEXiQAxUIAYTWmtFI9aMLvC6ng8no7DNE0KpFzT2FhaU5MaRKRpe6leRDUWscgl3YtbVOSCm/jOwNBIU4unhWGatOWGzGvFeyp9KZhJFOeCvj1Z5+M62tt8AC4t/PxFgOi4z0IDVPzRjfP6nu0jt9IyphMaj7usfSX8UAu3gdb0suTIXGohVOZAFELQAi2AjKjzoRCAqjoDzHkaASbKw4jxFFN/Op02523XdVOhlNJms+n7PsU+xhhjRy3JMFBoMKnglQvALjZfABjHMSAKkcZiR6wGhYkzIgJcYn9auE1U88fxeDgcDlMeRIQIxpFDxO12u9ttttv+Zre5vb3Z7TfABRE1rkE0kk4YEYUnWPFZcn1xREQYBWscH0tN7/KchZktxrAJKkKERPM+hDJvFeBPo0PW62qMWfrtV5f5te03IWNBKxcEd43QdqjseaObnvguCOuC5q4nBo7ONqJzxVDtn8Fmy/GDzwD1BI/GuVTq57xYNRHNUzb8w9IaXy5N+xaKAPM+JIvV2TSuWpoRa9Vme+OaIS1mrohl4xtx94aGBSItSJLeWwABLWCSq8LZdanrktRQvdL3SUPYUoox1vwE7Zt6c3Pz4sWrV/ev+m6bcx7Ox5zPCqJvvvmGpUzDBACaTbG93S22gLOmBVfzD1RLD4UABKFPm5yz2rGIaLPZbDab3W73wQcf/OyP/0GM8d/+u3//69/89nQ6iMA4jgDxKXxY31mcOwMytQDgNZJrvB8s8V+8x8nIGUINhIAVqq8FI5vVAm8R0V7qkOGKcvgU3V8sYXHcVmv5gevqwx6YMIc2Yq1kuT7shqOyEsjQKTZ+OcqqF28Mq8Rse8Awf7GEci19FwCU4S3I1DPL91SRXVG7dcixnVnF8wVZ9rT6GVAjom695RJ7+BjMFvBRrXgdFSlPxXu4Q+FJDQDgiqKuCex8oMtXfsmLnVqPsLjpzcb+5wtCt4bheslXF2s7iBcb2aXc3+JfP8MVNbiiXcDTBWb1IzYOdBnH6Yf2fLOQo3If9e/lccw539zclCI5j5lLLiVJBMAQAgKM4xj7XmHIuRQpRNSFOLGcz+cupVvNI9LmlkAsKEDv3j9qlT8MRJCKwDDlUsppGMdxLLlG3GF4BwA/+ewPU0qbzVY0eSyQT6ZBREBEKFo/cw0cv/AfpEUKEHaxfziXTK5ez+/7emsWh0iBrx3bVP9BwpRSQGJm7ayt2T2sonxMIjKOY54ytnhOHUF1Dy0zcz6f9/t9jHG/36tDjCgBFObJGJBe5/NooXQaDAkAh8Ph9u6S22yQUe8WADCDFvSXVtQktzY8lTwyCIhQ5V+IGGOHzjHe9z00nqg/0fgdhY9qaDEF85LFqBy8tQEIFYDWsD7GWs5Rp6SOVtWQz+fxdDqdToO6Mc3GR0SAtRIPadEjzVGrhSub7Q9r5F1oCdheEP9BygAAIhrZeHGpmVZmuqgqvUS17SFC8Gn/ACgCApCXGyj6WT0k7fZFwNDGekgXCmmGSxMvbQkKAP3KfJjG6diFidnPvaGBXQk9jflSGELzi6hy0fd918WSR5aCiBSUiGn4OomItv0QQQAszIjCAJIrph2PRwzEEKvZP6XNZnOzv7u9vd1u9znnzWYXN1Fzf2xKFWTNeCTO/q5B15wLAKhCGJAgKfLXTirjkNWDNwyDeQinaQJkxZf9/iZ1Yb/f393d3Nzsdptuu930mzSeT9KqudpkAIBgVqsGsWJCKZWPg1z2KOcMeIncWZAmRARg1aiUTTDzJaFoQYYWIYi2edBsa4uh7YEFebXZGzunVgNnTfU8vVvMB1Yk+4kjJHhNdDOIkGtF6qm2/9V6zPUrFp/lh/jH1fuyEpQ9bH9Asvn9Lv9DT4MWm2KvlpWEsfjWj2O/XcCT5lFSC055dZLrb+srQAAEsaI+c46RkHCcaoyoSJmm0nXpxYtXd3d3+/0+
pW7bbUNIw3kaz8PpdJqmE5ex79NwOp5Op5/8+CdfffXVr371xc3NDWAtucxYpSFEAKEUIsXgDyS1rkTKaDZdpxRZFcKvv/76l7/+zfvHh/NpQAq3L+4f3h8OhwOk6wohzPHZNmIBH2yC4NWcIgO4P/DuG1ncF2bhpYav712MbzurrYSXX8Fs359Z1w9e3hMFDrWewpNFsOp6+f5aPOnvMDO4OjH1vXMD0OK36wH1MsMBOz+hJ0HgJBijvOiULnZpbAusWANhMZMFzuDcDKRXaNWbnyEjeE278D+pi+Em6tFCQp0RQBEBuBSdNdrfID/zi8LT+74gC55Y1QnL7GFYbfRymauHnwfLM9eaxAGApQxIC5axvfjBQYzbwmpbF6fVA2S5QKcr+vFxxVOujuAMQ3NjqEuGNOMFEWlsoQrHmnJGRHd3d32/y9OUSxHJpQBrlGmgCOGUz9iTFEkpFSxQGAOEFF9/9/rzzz//8MMP/+inP53yeDoN2+22UJymiRl+85svb25uPvvss3fv3otIij0ITeMYQtA6OJoqNpQSa5XFFLuNpgOUUkLqLuvF2nXu6ibqv3ZePLN7BnRXN+6pbZKV2LB+fk1w0PlnjB+ZeZpc/wzV9EJIpZSkeV/9RmX0GC6FQKTFnSqWnk6naZrO5/Pd3Z2mPMUYh0E7FQO43NFpmm5vb3POOY8mzpkvwRxxBkkdliiaQ5JaIzvTsrAq53WlKSXzwqErfKUaLACoCqak11wRIYQQSTFB39510WalzjxEVHFc9cOmKAZ1ywHAOA7nc63qOY55ysM0aV5MdccBAEJQYYCRQ00bvGiDpRQgLC2XzB/eeKnKfsX6Kc6Ql3P14FW669RRaoVhqbUe0dTfUJu8XFDO62B+HCMOpuMxs2YBKIgKFwAgvKCiaba+LqN95ZF2jb36agucsShfmafd2iA6n1r/CVFEUkd3d3fC07u3r5lzjBGBmTMzp9hTIGmaMyLFGImZkbB2p+BcRhFhEIobdWYDQNd1N/vHu7u77Xbfdd1uN9zc3Gz6HVW/a9BKpPrDupCm0XRdV6ZJA8SGYdAuHSiAXTB4AwAXsDDjEDHGSISbzSYm6vs+pbDd3IaIu93u7u7u9na/23QpxZjIKugQEbtatZseCaFkQxtW2SS0YgxWDqdB40KUDMKVYtBl+0wIiZcD2axcfmNgfiGi7Ts4biEth81eZtssqxH8tKTpD/NXXM6G/cnzYhXu2ysmc5mbVD2jXUx7fXMxkwUc7LMl0XrqLyJPeQj9fqxXt74WE7YPmh4A7kjr5T115MwYxtLwCXs/OR/9Ym5+zuj4oudb3qoETqaxOSygvUCw9q34sHgxFgTQ96lZnibmLCKA3IVOe3r2fZ9S3O129/f39/f3210fMR4ej2/evHv37uF4OAvnlNJ2kw7jKZcpxng6nb77/nsp33/xxW/7fhuDmwyhJkggYp4YWJhL4VoqWisrphRi3FZxNud37958/fWZGYquLkQMlHM5TyMA3L24f38Yn9riq0iyOLcGZ7uMRF49hraJvo2B/3EuF91jMb7dfB4tES+JiODwcIEAMtdznrrI5U74317Ff7u5Pkp62SCebnjoVsCqhwCWZ1CqSXXZjQbmeLuYf2hVDcy45heymHAI0YQncpE2NA/qsB0DWJ4a/+f6SPrZGj33q/DExO6s12uT8dio8GnAIk3jac/PSKjIrM744oOslB+YX3MKMzPALRaF8+fh2p/+LeJsmp6VXH0Y5mdqAZmrzy+eXG/NMytdjCNOeruKe1evxTPidA+ck5Rnfr5YY+VsIhf2j4rMivtQ5S3mGCNwRgjYlC6EQCQYgqbunN8f3r55w6VozZguRCHpUzcNIwD85te//uXnnxPAjz7+JPabPE6QegGIKb1/ePju+zeH4/n9+/cfffTRZrM5j8P7x8fNZpNSIkTNS9MYMEY4nE/b/a26etBdWsPaSw1Xt8mfCC/vLggvrA6jR6oF9vqLnL/3me1b3LdVKMfXeEgjO6UU9fUghhij2l/UhRj7vta6HGvQo5UKV3Wo6zqVbrXu2n6/3+/36oGUS0WAWtAuhPDRRx99//33OY+qlmh50hcvXhBJU1EunfdyzjkzwGhRl5pBB4IsNbldmqyo8eQYIohQU+MMaClEnbl5R3W7Yov+1Bi+2H6Vkja4u/Q5QBIArcwZnCEPNJ0SGc7n8+FwGCugvEvjQu1VIdQtMJUSmmJsLh0LsjV9VVrpARHxoZoQQBo1LaVMU6klXvNkkYdoHSYoaPkcrIpu1MwpIioNmwwP2QJ62+W/pQj2GLSueQGpqObT7lAtmsIUQ022RATXzuR8PvvTZFKKnSb7rNvkKZvHbZ1hzjnnECk11ZEB8na75XJ7Or6fhlrgDYCAYwghEmmiIbaMDGYuDLkWEyYNErcCOSJcCqu16PHxMcZ4d/dis3m3399ut9tNv9tut1oqyUwb6u8tpUwAzNzFWBqsuhCHYZiGUYE8TZNaENRhq9i42+1CVMNZ7LoudYo/mOI2deHm5ub+/naz6YDz+XyeHod3b17/5je/+e6774yMKKiH4axoHC8N4XVZoR5q9pyIALLfi1ZATonSIrGIRSCaOOJ5pKduK1ZX8WMtvkBj2D6FBp2zaM192yxnXoKnLpPtDI0QsYbCzcf3D3js9Jcf2b99QZH9n+i4slpW1gT9qSUsBn/qh+3f5568eqHj+sYz7Ie2QR6AflhDg8Uq/LB+CfbZQohXeDKbmycNJrI7LLo8qQ9rCWQACChEIAEjpVYfLO332+12++Ll/cuXL/f7/WbThRAeHh7evn37+vXbYRjGoTBDiCgl5Dw+PJweH9+P03G/3ZRSvv3mu9v93WazOZ3ON32HVexGEhBkZkDEXMYIXYxx1226Td91HdUOpLmmFpxO5/O5TIOIMFDXdTFGKMwCoet3afd4OL19+xaov7pf0myBdofndXf8gVrs8vM4gIiq1XiQLtBj8Suc15Wdb9ysU1yb+VLotDuL8/KDqOsR1c/ZlKXFmLU/9UoR8hTJT9hPfPEJcXkuTGk0QLn5X7EKgaseCW7LSusT6DfU5oauqIwf0GaOzoblR56Tb/Cf1zTWzH62HJgTanvXM9Ow+/YV+ZkICVxyzg3CioEeAm1WJl5coX76ALiF2Agyv2nzWdC0xVQXm1XvP5Hx6K/nT9kCzrDancU4NA+XeP5dT+2sHVJe+VTXo83hJuuRn1mjf8zT/Lrh7nRRixG1kh56ZzzlaZpubm6YQd0AgEVE09Xo8eH49VffHh5P4zhu0qZ1/Q6IfLO96dLmm9998R//8hck4bPPPitZcpHj8TwMgwgOw/DFF1/knD/55Eddp+1+Oi3oZRJnjJ2W5mNmjefvum6/3495fnwuBGQGavvArie4R6GrW+lHXlDL9SFCdy0OL1zDpTWtayuNWq9Vb1a5S9R5ewnvspqTIYTNZiN8lma+r5VgQ5VZdShttaddFrbb7X5/j4han2YYpJSJW6PIEEIIKTSrgLrsBEZyFSMBqqLVrLoFANQZCBrhOe9AAxdTUS0
ME1tLPZ1qwBhjBgB1kRkgq1M0XjLcmkKIFl1ZgU81piPGCoRpGqcJ1I1ZxtwKe6rHCUspIpcDWOvTQPWCmmxp7SWYmUG0VI+dVsNPLpfYTd0p/VBKsSbmXn3yuX/UYpQCJfVwGrgW6MHOJWhaysxP0y5C9j4hU5prSCcUu6mrgHKpSG8UqbhyqXrfGBzPMwyNUFiVbJxf3Cp1h1CN8zFG5jKMA/Vd3/e73e7IJeczonRdVzAZBAIShWgBuoDABQFFj4MgiCDGSDWjryrtrcdjiTF23bu+77eb/X6/v7m52Ww2+/2NEplWWiJIo+q6Hdt+I/uScy5TZmbqI7dKuQCgGToVOKg1WkPf9/1GHbzQpZ16C0XkcDicDg+Pjw/DeHr35vUvfvGLr7/+2s5+CBEAxtyKAuDF9UJErUJvURWUiNSWYbS7Ap88ObpIoVLpRvMQejroBQX7Fy/jXrbQW9Fs6Jk2OJ+Nd0PZ6/wI8kPeCc+DG9GZkcsFhV3PYcEg/bdXXypOlvL3LZJwOc5TLsK5dm0DLiQAu7mGw4VwXGMz4ODpT+DVN+K8QKVnYwuLu61u/ToPWz8Ou76Z6x/6hiX2K3D0qI7TahuUUlTs6Lpuv99qb9C7u5uXL1/e3d/GGN+9e/PNN98Mw3DUdN3DSfsjbTY3m67POR+Pj8Mwas1xZXIv7l9OQ0bEFy9eMJ79qxGIEQDo7u5Oc5RjrDWjH94/Pj4+Hk8PwJfePmlT+//kiYswMqauT31irjkewxNlRteig14LuNkzV8krESEuVXRs4vgCN0QEkNUq6e/7XVjsuIg0/WthO6h7up6/rNjSAm2egoZ9XuDV4hWLw4jNue2FQnB0ADHYTy8j4OxFhrqwogMGh5yLxxM/c3Ypu+TavSyeb/eLIbn90J704j5Wxnlpd+FneBVDYIU/4OjAYhc8rBouXUCxJqF+fA9hB+olOQIAkQtrsEpROC+i42eCiz/nWiU4jNWvCGbI77dscafed3/ak/IE37mK4Vcvm6fwdXT1Zh1/kYvvXS/c5iaODa2n9NQxgWYQsUH8e+1d/oeXP9sZaQPWbxWNKahIHUGk73sVnkop5/OZc95sNt9//6YhPHd9dQ/2ff+2lMPhkHM+HA73t7d933cxAUui8ObNG83N/ru/+7vvv//+n/6Tf/Lpp5+KyLfffvvll1+GED788MOHhwcA3O12yhd2u91ms5nGMk5nYez6XvPZENES27SBm5anrgtc8URwBHZxoNaI4bFlQanWuyBzZgfuQF0Fux95fV+cyKTdJozmgMrrrIoW55y1rAu1+IWqFlJUIIvI+Xw26mQqkw6Vc354eDgejzmD8l+bqroTv/vuO2VwGmeoDDrnjHSx2jPXRhRWki23JoRGTKxKpy2TiEJIBmdzvhFRoAQiAFH1QmoZ/loBTrMHjZopGnRdqG0nqJb7169KmWy9OitVDMbT0BxyU1XugFTqBjAHXbIdtNoNpVZM1a5XqN2tAMA0t5xz5kJSTDH2Vj9VCKnVG6PmmxocEirkN5tNDN1TmAmAtiIjvJ4feQQWTTMDaJNpwbeAfd+XUliqyhFbKvuUs0XE2DiXtjErfPaP+Qcsl3JxIrj5unPOsRRE8s8oqZGSD4cRAbbb7VkCIkphNSyYeUKPORExIFMQkczCXKiFNKcEoo0XCzCztmEI4Rxj7NJBbUkppZcvX+12u9vb2+1+V20BratKXSYJxSQteWHgKYQQQu1sMY218WbXdYUnAIgxbLfb7a7X5KOSccrD+/fvz+fj6XSYhhOAhIjjOL57924YhpRiVhMziRq5xDmfiShG0RzCaqkJUQ0latGw+pSNptnn2ohVmudfIRwZpJSC3LSI1onavLrULhF18M/4qO2WcS9vHyqlkJNUbFUrPIYAAQAASURBVByThMwuZc+40Oqa59OwJViItkP0Yj5QcMVt13hm35Yys0x4pPQrMtvPWqTwE/Y8ozJLuM741WcteFHZ688RWBhqodsWgjOvvzczKVENyPErVYuIBoq0mahdBACy51IeGthCk3HlILpsqBpKdeFEAJBbmVkBKMwIyRane1FKEWFtRCsrSZSpCDIAoYCABBQCJoRQcmCS1gQpS544p5Rubvvb29sXL15st9sUolKE/X5/OBy+/t2X7968PZ4ez+fz+XysZTygSyGllITL27evh2FCxJS2H314G2MsPOU8MQD1CCiQSsoEaiVgLlBIS6hv97f3d9OYp6l8/+bh9es3gVII6fHxKMC3N/chpPNhmMZCmKTgmM+73a7w8PLDD4joPAxC4SRDYCKZAEGgMKtZPWolJwQUEINKQwYuZekgMpa/RjlmRrT+bPqVlHl1uPlZiOxCfD0VvqjijYKrYXUqeRFhBfVVs8NSfyWiCic01m4/kpVY3+6j5mYAgOoIIVzap8JcnyECaRnwl/kTgkARBgQMpJipdhkBCWTI3wa8YLjeRO03ASBBpOWUtuPZLiKrmhgsrYWZVb/WyBQiDDEis+RcSiFECiQABnMMRHjRGBea4XpTlMF7Vc30CpOiSiml1EJkmtlvxBCRpBWVVYOokTVmzbUIMNuLmc3OTHszZRtrXYScL1lDzKw+Gb703AtmHDThBsxi2HqZplTLD2pRBOYiEhQGfhdEZoquJ8tcQ6AdheF6oNydln4TQDiI2DiBCEUKc/HiigGEmTUlQwCQJCABMCKIFC5MFScJoAiAHcC2j0EA9CQBQAzdOI7YCg6JSFBWSLkiJqqhAjEAIY7jxAhICBgQsZBiKcbpSu794kzZQRMRAGpCwIUF265x64NyOWikzc24WQYLCogIAQZhIhTmTd+PpxMJpNiTQBd64pTiLsAmRd7cdIA5QAGWTUgqu0SMHe3Oj9PnX339m9dvOOfbze4Xn//68O79z//oj/+LP//k7es347n8w3/4px//6Ce//s0Xr9+9/ctf/XrzwYebbr/Z3f+Df/TycDh89dVXiN1ms/nit1//5Cc/6be3U0GGWIDTZtsTHY9HmqYynBPSi9s7IppKxtRR1xdGplQwMgbAQBSZMzARFoOe0RkPQ5hfMs/PFNf62Nj0QrQAx0+xVUlpbVcY4MIHEBGxLH4LACAFq04uKVZL7nbTYRl7dVwAjuM4TVlVqeP5lFLiMiJiShrewkoAGYWC9h4H7RghIi9evNBg0VIEMSMyIpbCOU/ffvN96sJms1FXMFHMeUTEGOP5fIyRzueRAm62Lzdbbea+SalDJOba0KIURgzTNFiP1pxHXcIwjrvtnpAILx6n1F1K3yskQ6g9JIgg5yphRkQiJIJASAQkmCLHQAjak6NEAkSErgMQgZr3gRhEME8CkEoOWvf/fJ5yqeGsY47DUKZJACIAqjgXYgClYBQzoKAQYQjEiHk6Ks3JOWvMZ8485ql1nmAAmKZRKSdhEK7SvDFZIgKoVShZivZiGYZhGIaSOY+jhk2qYqwCMwXo0sawrpSSEoWAOY8AgACRRLBK4MCMglImEQERaBI2AoSAIpaKxVyYS6USu34nUDjXCE+oEaSQui6XklvlTEAExJ
iS+Ho1rsDmQm4x1SC6voWhdQphZiIg4hALhQAygcSUUoo9vEeEbrtN5+Pp1Sv65OO7777/3Xl4f3d3//j4SBH7TZymiSh03WYaSx5zv9kfDxlDjxyl0G5zczweic9q6A8pIlIpZThPE0/7zTYLMHPJMsEEgDkXZXyvX2NK6ebm5sWLlyqIxhj61CGipggKCFL1EnebXqV6QCCkRJwghRDG6Ryp2sVCwMLj2/eP0zQdD2+1rUhQ2wcSIsokERALJ0EYObJsUy8ik0iEVLgwYwwAKpIAYuHAqBmPzKDluAJBl4IkakQJSNmicEDoVFZnzgygbBwQ2OUQBpcESLVw3IUHV7eP83/5E6sEguchlHTN16G80OjmmquZRd9LnDYBdP50bg2UjUx7+usp+FP0/amv/JQWgsiSUrupenFtPY2FKPPUxcwLiBkjoZrsuyw7u/7Tbtp24FzHMPP8wnT0zBrXi6q/kjbOPGaAW46yzCNRWzYCh2qLEm003O/7Tdcz8zhkEbm7u/vw448++eQTijX+Hlg04Pv169dKMc/n43geWGqZMqWY56HkMrFoAIZ0XQwhdDGFEHIZT6cjotzc7kLop2mYpimFhLXjTYoxQrN8f/mbLx8ej9NUCGNoEZh3d3dImVmkTPv9Jt53BDiO4zBOL1/cHM7y5vtv+u02pf7tu++7tL27u5vev20+tYsfYCU01EMEqEVkr2zl1S3GJ7JDYY7b/t+FJQ8c8izeogROYwyuju+fX5ziqz955vJLEBE/wTm+wfOjXj34Vw/LwgJiT64tI4sZyjyugeViES+tsqjRAfuJzSGXS1U9ceaSNST18na09bfYophs5uvoHWganZ+VXf5sLuCD1wKJ2eVyQ0Ncb4kz+BieL2CCiCGQ5QwbHOCHoisNbh6Azz+54j41wt3e64X4BROBi9VgPjIAwKUQtNYbQAkAgAACuaYGAQCQ6Yfa7kw/h7bwaZqihUBLldSQCTT8CQERGZqiWD9cuZ5hT4sTbagoKwd+/cp0kzn2ahkGAKj9Awhb7hkej4/DcM45pz4ibZFKjF3fp8KZCHjKOYeUAhGEgPub7UcffzCezpiZAtzc7DbbbhzPmaebu/1Hn3yIMfzBT3/y269+R0Sn8+Hu1Yc/+9kfllL+8i//ElH+6I9+9uLFi9evXzNnIoiRYqQQ+lYEPyfsbm5uur7XeWqoPxGdh7E2RqaLgPEMr3uKeiwwClz5mcUDFT1aYUmzmvle0vVhR1twLsx4tEREOzX6vOYQoiv40exWME2TspKcWS0RamFPm+4iDLTam5qPl3PuukmbxWtJlZpzyJM2J9QozRgphJBzHoZBp6GmKFVa1IuiOo9WxfDmbGiRxka4NGbYvDp2ZO0ZNXwZ21KnKCJGpNpLkIgIAmgVkIgAiBJCiBQQsbTaCqpzliI5T2bJKqWcz6dhGEwhnCbSaQNArV3Z0v8Wp6m4OtIqw3BRARVEC7rmrLGstc6COieRPB2uIaZNbAyuUQcXyTmbHnhJkmyWNSPROJe6FeDZtUfHFk9ne3HBt4qAs4uIrOchrOjG+pj44+AHCSFoFW526qI+oyrx4n4pRavClkwllAuPANhut+N0Rig//vFP/uk/+fN/+k/+/PHw5tdf/N3vfv31X//1X3/55Zen0+n169fv3z+mcEz95v7+xTCWECikkAucTofMcHNzkzbbOmdCAFRtU7dg0sgvVqyrTpfj8agTPh6Pj4+Hh4eHu7u73W633W43m03fbWMrhwsAKSWWSXP6RKRwAYW52kCROZfhdM55tP6fgLWdiVaOUaWRgh1nQpRcKtGgWClJm38VhxTcsCJn2IoD+Tsax2PcPzguLwg1CtnMAIpq6OJCYaZCtKI2197t+Yqnp4Y0C41xwdTbh9mf/rdw0TQujGqBoOA0WJiTb3ji8i+ypS0WKE/LHwv+uhah/Kz8sPZG/4CtazFzI47qC5G5KCkr6Wc9lD1s8PTLsY1er/2pqdY3mosDLg0JyYWELbaYJSNiiERAAVBIMEYMoe/725vbzWaz3+9fvXp1e3tLMSDim7ffHI/H4+Ph8fFRy6C1hRQprP4KapGTiHhzu8tZvYAigoAsPI1THg+j1vYVKCJlPA2Asu37j+4/KrUTkJRSDo+Hx8fH43DOOaNWw+pSSt00FQDY7/an4TVzQYypQ5TpfD4jwd19//2bL1+9evny1Qd/9md/fnv/6l/8d//y+9dv3r9/pNhxjeOd2ZXBuao8MAFmaO/3cUFwYU6FF1gEKyovc0Xo+VeAFRVsucv22OKMr7H36vgLpFqfpgUCe5R2IAKR5SEy0uYf9qYZj9uLJT91Hv1ndLqKXtwCd8EdTK/w0LxixOJE4yqoG11I4WKe/kXe+yeuvIqNtua7MhcI/EwW1qv15SfsF2Iz9Ni1foxb++yrwDcpeQEWXlX6WW+WOP3N79EzS7Cto3AJ91oA8KkRRDTWeoGcAfSsMiCqtx8FQVqbmaa8XWq+mY+RcybNa0qIFJmZa6ZuQSJl8lXjFKiKJov6jtYc6Cp6ewRYILbMqdCM/gCwIAADVNOLyCWrU6UTadXztTKejI/Mpevj999/ezy9CwHH6ZgCxRQA+NK2O0VNNfz0008++eSjRAFy6WLCwrvNto+p72v1ywLyyY8+un91T0T9djMxIMput/nss09fvrz/yU9+st/vX7y4+8UvfgHAIWir8bTdbkVkmoYgcHd3d39/3/e91n/WKLv3D0eVj0GEmuqFAr4DzVXArgnXAq8W/y6+5XmZKxXIWavv2MMkwFdCte2D4hI2VptSUmFdK5ogYiDjua5W50WZ104AzMznaTC9IlBsdbO15qH+1atCqK3kDzIIFA0Gnqap67rNptZrVVm/73st7kIa/4kBBErmPJVpvAQuIhAIq5wdW90XRSSrFwoAlpMcW08FaxQhVQfe1DMroK6N0DTGFEOMUaVxUzKxFQUFABHUmi5VC+OsVU/H8VxKmfKQc2ZOXpDTeV4iTlvJMTHligdm1l4aJTMiau7WeRyYOSVN5uwbOkguVRGCGZ2sIYgW7Wzutdh1pmxf1H6sSogPvITGI7wG3jDqEh/o0dVTUcM3ah5vbEodOv0B2udg1VaUMjvU9WeBWq2Nq+drTcYVh7kVwrF93Gw2m23XJcr5/NXvvj6d/tFPf/pHH338wd3/7NXw/v27d+++//77r778+ssvv/z1r3/zmy9+9+33r8fhzAIp9B+9/ODjj+M08vbm9jQ8VL8lQsWvbdIkQ1UIudSlNGaqe8qllIeHB80v3e/3L1++BIAUew2lZGtT1oJ+NW82WEJji93NOY/jWe0FzByTMDMZfrRMUbm08RMAUO96SgkkEwYiEjQgK4cy6LOlaYj1vSRAUOdNISQKUKaWAhMApPJoBrkk7Bp2cqvV4+/P3v2ELLXARWg2LXE6CVyzzXtMMinlgmoOTe0ru7mYDM4FTZEfUOQWT3pMXTy2eH497POiyXqQNT82gPv5+H/rclYamj28mK2/uTiW3hj/jDx0deaz+YBNDxeXyJV1acN6YS55Y
pYupe3tbtv1P/2DP1TTy263u7t7UUr58ssvv/rm61//6m+0i6ieSW1hRATbfoMhahcgANCkWGbOw1GN2SGEAEEIhAkAXr68z2V8PBxI+Pbu5ub+Vd+n7XbLJxwOh4eHd9rXmDkzSECMadP3/WmYTscj7XHT9YghRcpChIIoIGPJY4rw4uXthx+++tkf/Y//5M/+9Kc//emPP/vDb79586//9f/3+++//clnn37xzWuDmHILg6dc9u6SdgwrJ4Bt33pT/IZ6qmp/+lO8GHMhxJgusTj4MEf1BQVf0IFnLhvkmQ9P/dB9WGoLfmIwV5/8Mv21cGfBNbc8zA/gmqpc9DcUH/EiLkF3PT1wpduNWi4WsnienUfOvH8Nl1icJqwT8AqYX75n3l4dfQrsXr+1f4NrokUrp+UCbTy0l1OSCxwW2EXXiN4aPjbmVafZgh56TF6Qo8VbrtxBrmaIqjyo069WFhARTbLAZpHiSwwNAoBADdDiPAWiPgUpPBFvYtzteoD+OE0FgKvDIaAgiVZLF65zEGCpIfbXlBPbL38qr7KtNVYsLkSstQ49cBAAwHJR2oZi7Lu7ly/efvvw0Y8++ujjD4ggj8N+3w9jFCmAQgQowJxLkcdHPp+PIjIBk0DO+fx4AJYUYh8iM29Sh4gFpJQSUuw2m5Di8Xza3b188/Zhu93e3O53+23h/Hh4CCG8+uDlbr9FRFVibm9vEfHh4eH992/v7u52N/vttiei8/l8PB5rm0QiAgQBFia4bhcDh3seCRd4taYJ/rwsMEraZc8uqCgoKpXZKxaX7Z0/76H1BiMMLQZP60tLSql1JDMqJznn8zSo84eIAlX642Q2VJedxrblnFMcVSGs/tVWzt5KTWq5S2g+Ky5V6rVYMJxfij+qB3JNgqruL8CZRNTE61kAQu364LrexBCIIMaYQtVeqNFkAGh9d4tKyaWIZlupRD6O4ziec86lTNOUrX4MKboLEGJACkggauJBYcl5smDIKR+ZuRRRhVC1O1UIERGxtxWp2HZpxtei8bWnvBkIKvQyK2yptRxUhbApCQacmrxQSmEGEci5pkRZ8pE5PBUCa5akNxd4q38aa7MHZP5b+xxd1X1b4CIdCS9h6mI2U1tOe0U7Qa19glZJOZxPiULAJCLff//mbz//JQUZhsM3X/7m7u7uZr+/v/vDP/3TP4HNpjwevv7q+2+++e777x9+/esv/92//6vjOcfQf/3N64d330mobIu0Y2XsU0raN6QAMrOwMtZ6KLSqJ6LWxBqnaXr37t3j4+PhcLi9vX1xf9ztdro7ulmFs6qIegQSXVLAxBlJ1QJSSgkRmDlgK3REWsc+SkoAUMpkWXVQJSXQqkgsF/qjZ1yktC1W7CLltUQUUDO0AFggwKUW3NzkB9Y1W0R8tQ9oo3pihIiEhLKUYIymgBMrwcofXTMPeFxcoBe0pEFDGnvAkKaB5klB2V/yhLK0/mxz85xgMVVwpNmeXxL3p9+yfp391o66AtCnHYvbDqTr5nMD3UL6sfPsDyfijMgu4GPrWrxivVkigjKjIPbkUzIHAHApRHR3d/fRqw8+/uij+/v7bdd/8PLVhx9+KCK/+tWv/tW/+u+/+uqr8/kcY+Qi4EIpQgjMWSOtQ7Seb4WkpszlMiEAYkAQlkm4OcCx7Pa7D17c95tus+nGcXz//v2bN9+XR9TS5CJFc9lS0K5EjAIp4Lbb3d/fAdDhcDgdBgkZUVIIt7fbzz7743/0D//oT//0Tz77yScvXt6O04SIIOcX99t/+POf/Zv/379hZmw5Y4gXRo5mwnXQ1Q/+7lV4+kE87i0w0P8p7jIMMTT2bzHcwJVkv7hMBVq8QsNs1ngCT6iCa2QTqaxzfab80tYwWXOpBQG5ChPPsUREmod2ccC9wA0+kxkqe/M83r9a5hLeU1D1ZMcvx1p1+R/6N4prhLj+ObhsQ48nRhbW8LcF+vWuh/WI5Nj5jJIgYrVQzreyqlqrYqp6dzGl5am5Bjr/pzTuY0Dja62VpCnYXpT3b6kfWADAq51SW6NaXCiAEKiF1mWAISLVEuCMAgEFuUDmGML+Zv/i/q7v+2EYTm+YEAUKgiqfF+6me1YR2KxH4TnjHc61FE8i7Ca7wmMGMbM7aEytU8tRg8YLFMmXCC4trY5I9/f3H3/y4Z/92Z9pjOjufg/IQACnI4QARZtB1AsAjsOjlHI8Hofb/XQ8g4jkAkxffvWlZldOJWMM2+0WEKdpOo5lGAb9arPZvHr1SksmNCEsxRhiJK0ycnu733fb03B+9/YBEbXDXkophZhCi2YUwJrjIEggAAuI+Q8L1MK5HcGj7lWiJ01/89SmCfHrzWOAJ+ntggRRq60/jmMIoUvUREylDEBECEFE1BuGiMxCRKGLalQNIQSKRsZb1cdL6ULVDAk7bahdWkfHUtrPQ6DaLrwzcJWSTXv0R88zF2rBosxsIZEUrtIrMYXBkLneQVHvoCqEXdcl8rpHBVrhwgzN4aZJYszM4zhN0zROwzTlUnRdWqfRiZ1wCfSwOWisrIILEbXMuKafcS31ASISkDCQObhKKVMeRSTSLFKjJfSBFvjRV+eppljHGKkBzWIIA13qahoGliItCPPS70eabqb6/6JDmEmIni/AXBIw+nmJj3UEyhtSY0v7VCV/wXYXjjJxqqM+qeYDzS1ULBDN7WYNuk6IIec8TbjbbhDx8fE4DlPX7YLwcD6eT4cYYx8TYiCM97fbH//4HwOm33zxOxb4/G9//e7heDq+P53H/f2LEEKMKXYppRSDOrdjSkmoNhQx+ADAOA4KdsWZlmpbROR4PJYsj4+P6r/V87Lf1rrHkgsBkIAwSy6JAjMXkBAiBCoUpmmaBGJARkaSGGMXE6JW6lAIVx8phpCoJmEZKkotsBJEBNVa2T6048aIEGNtuYJSGIOAIAlR8DUmKlYQIuAl8U+cZmLykuGBYcxCkvCED+ZkFFuYODjG7IdaTqgiaWuwuCKI64cNr/1jsLrmP7xiMfWDeHkCF8Li3CK45hbrCdif11Y6k1+9VOe3w+5wDeFbqqky13gXr/BwRqc/rOdmuwOrfVyPcwHIpe3E/D6A5Ssb+RCR4XyMFG5vX/z0D/7gj3/2s5f3L3jKx+Px88//5j/8h3/33XffPTw8pJS6LookbYBbSnl8fFQci4QUewAQaC51rgUX9BXKnDzT3aRO2+ymlMZxfHh4+Pbrw/l8FpHNthsHtXdSSj0Fpa1ZhFMMQHy/39/fvRCR169fC+f7uxeM8POf//wf//mf//znf/Tppz96cb/vNxEwPz6+Rcw3N3en01Sm8uNPP+m7eH93c3xzbmCfbQpqgFmzy1aoIoJzwnssWohxnoI/c/lTc/XzepdtzDrJJ5wMhkv271XEXhyuxZPPTNuP5jHKqqSuedL6hzivhPzM6+xbcp5zDxaee2AuP2xFtq7C0G5CA7tVVsAmeF0Fix/T3uj1Oj9PWyC16i8yp9jGjP1Z9vY1WBGuNUgXyhs+QcD9n9gUQotDqSt1NSTIRdvC3CO6ACNc28Tnj4DM
mQ4RFr4gz3ot18Y3ohdEBAiFRRjrEq6cDlLmbAsKwihACBoTsQ3bl7f7j17di8h3xwdoBlpEFKniHwgioLV5REQrk8MrLmNLMIHS4EnzeC2/109tvYjQRYdkQ2VEynSphJRLORwO33zzzccfbH/5qy9Ow/n2dr/dbm5ud/e3N/ubLRHFSJD6lPoUIzBDKRDCHb8AAcgZKOTjMY/T6XAkgJ/+0R9rXOLhdGJmLfJehIepaOzG4+OjIpLV5gmtd4K6eqowPZS3b9+eT2O33d3f3wNACOHdw3txBnURkWaDu0q47LwvUMJTP3+uPQIvsMses42gGiCQLZzYv/0pdNajZHuaUtput8ysZlMQNIUhpaQja2ChGo6w1pHCTRc1eSmlFCgaYVH1Ulq3JzvaMUaW2rFQRd5SUN+rs7IUxFBLj0DOnDOXIsygOhgRaQEnfRYA1K8oIlVcDResDq1JRvWZOo0IEXNmAIgINXmQKEQMSDFGJCIzrSIJah23IFJyztNU1POmlyVJapygJRbON1qL6xSDPABp9pe1+VWFUKVWP1v7YAiAzQjl6B6IiG/AQBgEBGAy9AvNfVp1QowX52eVz0n9k6oQhnBpYm7I0+A5u3SElC6hy375Xp6foahjK/4ndljIuf6aKfzC/mSeVGWE1y5m1pZ6QIiFNEUIgLbbLQkQllL4q29eD+MRpHz66Ufbbtjv931MUDhDFplEMOfy3XevMfS/+eLrkqfbu30WePXqBTNkrMpb7JJWi/Vl8BTCbWmEiPv9znGu5r8tBRGHYRjOVUWUVh7l1f2NphdWFREJCQkwpWDGR5ZcShmGIYYgWJgZkGsJU0QUBrS+1oSIMQXEKFmd0sWQTUQQmUh1QpBWElZE2NLKSJENUAKBCGkye0VRZrbAE1UlozBSCIjAyuUwIKAAE6EZBhQ6ACRSU6G84cc4upFCj3ALJmS4YjhhD6+Rb8GoFiz8gqMrlrb4c06gL/cXP/TP23IWs/JMxb/dk/714HJNaH7qLQbedjxmzhARsVK8dhNr7PXsdf4V1LwWT/EwcIcT3Faiu/zM0SLWkBYwdMg6k+F0wBTidru9ubkJIbx9+/a7b7598933r1+/noZxu9sAQC7T4fiYc9bKTsfjMYag3TxjJBGZ8jBNU9/3JCxcPEZJs6vt+k3XbSpJDSHG+M0335Qpn04nEdntNn3fn06nh/eHV9v78/k8TWeCsu1v9jfb7XabUnj78P50Ou226eamQ8QXL37y6aef/uEf/uH/6B//6cuXL1++fEEow3A6Dw/DeULibht5KIfHd8MgSNuf/+ynt7c3x8dDSpuhUfyGKi0hbQU0JTprhDQEg/m5QCe4LH/2BIYvWMLiHKGTXdb4/Mywi2Pyg9fVOTvMufITEQGVlB05s2mbrdGGWh9emB+ZxXvb808qZgto6DQwLK3XurmlXCy1V2kUzj2xC1oB13bEooCg8Ww/uB/k6vbZTXEE/OqOyDXPBsybLi7g6emzRy1a+wDdG50k+pwR4eqOXL1sDt7vtwD7VYJ29QQtB0cAERACUN+f2nKBWvUs/VO1QRImYRRG4CASCLbb1Ke42/Q3fegIxmHk4ZBrtE872oSMgFWhrHGEwS1ksZynpi1PiHSLH+KcuSBeejdDiyoSBGAJgVSCFxat3vHu/fu/+uu/3v9Xf/JX//EXf/Hv/u1ut7u93d/stz/60SebzebVBy80aDMgbbadeo26rhvHMzMPx1OkcDocQghSeNtv1EJHRCFFZnx4dzwej9M0QQzMvNlstrvbalyGcHNzIyJISYC0qtMwFpEcY2TB/e393Yv06Wc//eCjj4+H02efffbtd29/9+VX6mzHWiedhUWomOtXRBawkt/P4OWl3jXNQRTNyWy2fB8csTz79uvFKwCEiKZpsgYG2idNbfc5Z4SxKf+qORAApKhpmZbKi9M0QUB1Dqgaad5Cizj1R56ZkWIu7M06IlXZsFWnlLTpiI7GrW8eNCFbLxFRyywiqlOFmUOrAtBWagXeRXPJKmQAkQURtDRRDJcKKxTAes9kYOQLRWKWiadSRHP8prF2viitRYQW3C9cVM+0jLwKAaiU3NxZ6mxkFwcntQEVhhAiEaakMbCKFVrYpiI2hBhjHluRPCCASxFO1c/Vcqe/1dVpf+MaMoqXNEKo9aVFwzAdIl0cgwuz4xwtZ1KZXp6h0DxSlCywc67oGtov0Bga5fehpB4fPL+esSpGpmqTUqzLOaNoXVkqXHb7tA83jw/v/u1f/Pu//EvY0OOnn3662+0Q8f7+5asXL/t+G+L00Yefvnv3+Nvf/vb169dIab/Zfvzxx4jh4ZybR7r1jVCCgAFjzc/UmRBRy2C4TJJciqDe0fhPbIaD4/GoFZjUStL3qe97PbbROrtIVpoGAKfhUE2WLeUuaC0xyWpqiYEUGTKA5NoPgghQBLg2CxEBZgC8pJXmnEUAEYoUZkbRfoyISNg4ZF0UAAYQRgFh5tiO32Vr1cdsfTaMtVR0W5X/luZf8iMYMsk1TcPEpjVBVDsWrK5npIqrpBycYOH/XD8Gjn0aybbj4Udbz3Zx335y9S1+zv5aPGyGBPvT/0qkqEzsQdpmfsXeaUTW3uVX7aehFEQPoR8B5icW3O5L8zNIE+LtyVKKLdTEdyIKmsUO+ObNm++++WY8D1IyCvSbbpqm8TyIiPbbVWoYUGsuBxEZhqGUQgG2XSp5Ks2rWYNbIKDQhx+/snju8/n88PDw7s1bY3uaFzGdp1IKMGz7/nh6x8ybbf/q1ctXr15om5dcxki86eiDlzd/8ic///nPf/7Tn/70448/DiEIUi7T4fERIHcRt32iQEjlfD7uNv2YYdP3iNvPfvyjD1+++u3xqxDDnHpW4DOzXGjNFYPI8yi0+ApXEu1il58f31NwQ7l6SJ94+9WzAAD27FNzXk/1qUXNh9V3zXRFT7XWP/S454+wfrCcDf+wjrGYiQcOuuovFT44i9MGl0q9WAJUi2yyI0kumXBRHsBfvGrsaV6vhdomrYiLf3INT5uep1cLIknzaAVPGO23ftUyv+x1XdeJCxCqcxC4pD9VXjDz0T1FHn//yxs10LEAboyQW90CW8vVdyEwgCvlUrVBovZ4gFpYC9UgBdp6mwklAKMwSQ7CPcWX93cv7+8CopTMwyMMY+JcSrPQAVTvN2gRGcUfAQAWIazUIqxUFFmxJ9umtTHew2dhlm0yn7Qj4PlmKQVrjGKuaS0PDw+ff/75t9/+drfbnc/Hm+3u1Qcvb29vf/Sjj4dh2O+3Nzc3KVLXdbe3NyKiMZ/jcOq6rpSy7fqHd+8B4Pj4iIh3N7c559il/X4fQng8Hs/nMzOfhqM2BNOuXOrb0UOkQZIxRiu22fc9l/Ly1au7uxe73c1hmIZh6vrty5cvP/+7X6sOECOBIGIAECgMFzJwAdTiX/9hISgb9Nb8sYHugoOmATacZLhSlOG6T5JcnbbSevMSEVI9zta+JYSgMq64NjNd1yFSSolRagJhCFr0RYkPVovPBSX0K+GgnmpuCYTjyNM0mU4iLSd
K5wASjAKQSxfULdP4UgBQr1cpBSVTqztqECaqI8+kRCggkGIfKFjJTc2nQsQyZaziTUFEZEEWZj6eJjVGqDaomqGVuAQARLUqK86HUiYTXwmr/UvDOqZp0tpyClVdfqRaiTSEQBQ6xJB67TOubSfI1aEBgBg720RuTXoUFAVq20YVV2KMu90uhEAhUOyACIgEUQgh6P5CKQUB9A4IgUijzTEEMdNk3UoRwFkUmLF7cZIkNvnN8wjDw+I8536cMM9LNxIUWs0V+8qsio68gE1DWsCkooPeL8TTNG12+5wZISDRNJXH4ykS0oZ/9+W3Kit+/+3bvzz/tYi8ePHh3f3LLm2//O1Xwzm/uH8RA6f+Zpom7GobpCbYUKN7UaEKcjngpgZjVVAvbJ1a7VnNtjU4xIDTND2+f4Aadx32+/12u+36qNYQIhLhQNT1Xd/33RCZmXMZp7PKt9CcqyGErkuICLWnKJaiel3bQRJARlQ7wqQlO4xZVTZRioD2oCMKEAQ17lRRGtXEAsBQ4R+NE1jFAiICEfXVqu5rNjBCEm8dcUq/UoQFGkFrGmaob2jEcw+VIwdLGc5w1A4et3TVGKN+EPcWnJsl/G+JiDn7A2C47o+Hf6MtVh+wNeoZXgyi59mkHLvPLt5swTDE1WsyyS/G2OoRzzyERBT+/5z9aZMlyZEgiKmqmbn7uyIiIzIys+4qFFBANzA9fcwpy/2wHO4XUigrsuRfJT+uCCkr7NmZnoMz6J5uNI5GF+rIM653uLuZqfKDutkzd3+RwNIFUnjp4W5upqa3qqlaIzwt+ZAATiUcoCgCkVG8nHB+t9QaFaQxtfeF4rhUngYXJZUkJhCNyVu3MhFSzD8gxu12u6ibytp91wmzIbq/vWPm1WKpTVqMMVFYhEPotQcrs3jfc+irylpLeqqEJYrAcrFYr9fWWmuq1WqFhgjw9evX2919VVX7/f5wOJyfn/d91+72WietahoR6fu+a/vLdXV9fb1YLG5ubn79q1/Ujfvoo4+ePXv2Z3/60y+++OLLL79Yr9fOOUCJMRwOD7Y+Q8TaKkMJoQ9kIhnWk4f9w85WhjA8ffr0Zz/96W9/+zuwoDqx9m0jIsCRu07xIqHH1IrTH1VVlYklGZdETmsPKk54FOEfrnxn4uSjsY82D8iPxPEyoy8xSjQlYTyZ/Fb+TSndkR9N+JTMdvJsafDYmcmw5Y9yFXHcj3HCcMp/zidZUmumnRJcxTYNCX6kzCGkBEU4umD0pn4hpMa+mnRkUl+mTG4ThgOFDpqZeF5m6TnSHzklFcaVljn1A5yselL7YeI5kiTCS1aAY40Bx1oFjN1PbdvmYY/8XyivRRHJ+2M7OJhdODZmYMxCy5vp06RKVfmKjCPPmLrgTpj85BUZ3BBHuDECAsYYq6oygN53HKO11iBE5th3i0VtCGtn2+22b/npRXV1cf786rJ2VmLw3pvKici79mH3sK8XZzFKFzyzlo6xhMgIEdS2J+Cg7JUEBCLEoyGdETtvSt6RvNFUKGolQKQogJmf0byMEqoyeHWPsqmua89RAMHY7169fv1GixZQTmp1lVEjzaIKLLLW5s4B52drRFQtGVlEBFPd4KqqtKhYvWiyN71aVIonzKyu98PhkGvZp7NhR2nIEjebzXq1aZpmvT6zrn737vbb777/7rvv4NigxcTIAlGYQCbkjFkOwiliLCFTOs7yHLhIDSWi1ORLXxcATaTM+oAgSq77QkSDVjyumYwAak5rT3m1hNXq0LZPSJj6PQz4EGMU1oKiTslQBRDjMfjv+2NniMGpSjazdzXkEFzXH0le1RL952az0ZNv1lotTKroKIX5mntR5CKZ2V0rwxmQotrqAOfBclaRZ61FkhACIdV1bchlfQlp6GIVQhAQiDk5fDjx6L3vAmoDjOCjhgd1ycofZDh2NURXtPhQ7rhg6GjYDHdS4mVms+oZ0qigViQ35IQw89uqqlhi732U4JzjollFloMxsB7rQkS19rXDBzPX9UKtX0UYvY+IfR8y89S4p06PZJikBj9zIjEiCoxabqp4jce+qVjKC92yDCjdvhCCmlDZEaB1bpkZ0vnYrMoO/WlE1FrL89TvKrboSsuCnIqZ2rEj1TcdwomHvltUjlliiIjm5t1t37Xf9rdVVS2W9Wq1Wjaruq4r17x9c/v69V3bhldvbnovhIvV+swQUuU6bhWGZIwxDge9VKy1nvUg6SimiqlAd9/3er46hKClqhR8Wo0iM4pw2EFh8Gu5I2tt3bimadbr9XK5rGuXYx6rxVIftv2g9uNQ9B7rulJrwtnhJIgxxtnGGAMgRKDpAOoiqLDyHDkF5yNH5b0kYG3lLCGitkee6FQyZBYMPPDomCm5Xim8RxxQWADhlMKU/RA8Tp+QcSoXFAoWnLqUJsv90Ks8zQgzyV3eSXs8eJiybiSFsTdXO0pSKQefqFwydo2UImQCtxI+RDT5Vn43Zd4fxYbSQ2YWWRxiOt45kUyT5Z/858k7+SaNHcnlMsulZeBkdYSZCXNEAmavTGELSe3o+94ZE3rvvXeGAGCxWFhrmYPCm4iMtfWi2d13BqWqqlWzAKz7vvXeu8qKyNlis1qtVqu1Gn677aFt27u7B1UdRKJzvbFYW/dwe6v5NqvVyliUGBDxycXZZrMhfhDwu32/Oau/+vGf/+QnP/nyyy+vrq5W6yURNU11e3tb167v+9VqVVn20VskMCAchZklAjKStO1BV3F2cWVoudvFZ8+eoUZChh7EQ8ie42CrTHCvhNgEtUqamrwyud6DFcUcjtdJHCivGMPkjoxV8PK/k+lN1Mo/ZML5dmlqlvOdfKKk+vK/8y9OnpSxcnx8F45l4rGwFrITpKRKLAxsmSXG5BEmdARFjG6yF/lzeqe0b2FsaFERoCu3tes6KoIJ5VCYRH42ITJjKV11yjzLT09ge3Lf82InGznxOwxTguMpyjy9gdOOA19/4IVTPD8ybRwRzjETJG9BBuYcRRER1FxBBEgFlWHQZUPXM0pVVaZyEj2HaETON3UMfbcNroGrdXP2rPn4xfPzszWB1IZCCN0BQgjehxq5Iej6FhGtJQDLglEkaBQTQRAQMWp5Q40dypQh/F4onaSFky/mkct/HvGWCMd+/UyRIBgFRBSGEpmJwv3dQaGt/mwtCgkAvu2INONJRNR5fTSfch6gZlCJSNMMhRazHM+e60wseRMREU2w1mqpQOOqumpijLvdwUc5HLoYGYeDZilKJiM/0RiLTjOxyf0SzpMnyzELDiAwLY07FJcuyW2ya3kfEVEFWdM0xhhA4dT0T08P5icVwzWoxcwhxBij0JHJgCCnM1HqqNKKl5ldAABH8qHLNT80yqqDZ+NQZfrg4gFAFGspn2hSe8a5bBAqSgzJ/1hASVmIpi1nb4WxqPYpDccjAUmQRCBquf6sBA8UnTq/6+GungeDMIVDjzmf6UcEADKDRe3bDgAyN0QR1FUov+WpuHGV4p428Gg0sVNE9l2bDDEkJDXVnLFxloECAMJQJlIiorVV0yw0qzArsRnUAFTq20
ctHwxLTFsPiMdGREiD7yCzRy1jPMHS/CP1rhxM35LQ8pP5AWY2yeWXF6Uype/7TE2Zw8ShyM2QzzKi5XGuGSICsHpVAMB7HzwjMKGNUW7v7hfoRehw6L/53Str7ZPzi7pe9D6en10hUt/xoY1v5K7vwTULQ87ZmpCC59B55n2iNYwxlhHCUpSrbOq6TsvXq0GoVS20MX0uMwMACzspCjDo7XwbNLO6aZqmqZbL5Wq1quv66vqJRg6rqgqxjz4wByTZ7R/UQUAGjLMSemRBlLqpiIjjkGNKhJpcQwYMHq1xDRs656xr8qbjUJFYmLmqnaaohcEMGaq9DJG0vCX5d+YII0SRaUPMbGiZ8fmW8veAkTOPO5y6Js/PcXHyu3TOlcJ+XkQhuStG9AxjMZAVl/mUyucn2nP51twHX2pCI2Cm50vkg6SQlSQ6AZeMr7xf8/mUw5avJ6Q5AYTJPyUJP6JRJJ1SsjuN+vweAyyTH8VoxCH0fahMjDGGrhdjiKip6pTFxxrdUg+TQUuE3vsYQ90454xzzWq5UCe0iBwO+7u7u/1+H/vonNtu97H3elTdc7dsNvVqtd1uXWWAxRBXtrJNrUvousOz68UHz1989dVPvvrqq48++mixWKgsYQ7MQSKeb1ZVZbsORTxzIApIBpGMMQxRRLPMxdqKGcigRWIREf7ii8/Ozlf3MDpujjioVY/hf76y7XGSXsp9mbwohb4yR5ucqlcibf5ESXST8eWUGpQvPFopAAXS5ufnrz82lF758P1JkoQZip5E4wnllo+VJlMeB4c8LphwMyl0wcyRColVLl8mE57QrxnbeJCsxzmI8jgT4ZSnMVkgFqUXyr+Wc5MioIQ4ssqywp0VrBIZSrhNBoSiwex8p7IqU26WCGdn8JS7wnQH4RE8fw8+5Gpsk03Xdc755+Q6OezkJgmgQYMGY4gxoERHVNWV7O8bB5sVPL168vzq4vxsfXl2ZkB8e3AVgKWDUNfiwUcKHQWwziMYZokCQYQFBQ2SFa1co6cDQAYlWoxmZMyX9hg1lTzksXWVfxobhEdjyRRFKcbfJR7pTwQMBIxoASCymjpq6jMAONOU+JknIyIMEKNAjJBCGQDwsL0vcTVPO7d3zzMZ/oo9Dok8EtTXDkaNwxgjAg3Hk0CIBC2CP73LmT9kaJQYfhLaE3ZXcoYJGyy3Lz0p5agy1pcAjuTPzFVVrdfrxWJhDAmwiMZMVA2wMcYQ2BijHdU0iTTDkJzJbSfUmauRn8xq9MexbzgbPfKk1qBKRkwhIEk6verERISphEnWEvW3TY3mMSmNeUoFlsYMAQ2UGTs8o1YlFKxAkkWXrdlso2pcS8vGtD1ngzDGXJd/2E0eqnQg4BD10qqtpmi+p/PUh0GGc3HZQ1GpQWicHhkzxgzOCEAZzndJSv9WTj5FG/1jhpgxRgRt6j2YQ2cZGUJg5mF3siUsqfzT2FcyFPwUiIjDscbMGxM1jfD8OKtT8Q9InCTPJ5vWYIYIJxbVj7KWmE1WKZTzbOiWdCEStVEoADBSBDYizEHTCbq+931bOXIVqvNiu+sDkwGMAYXl/uHAd4fd9vC6vr26/uDi/CrG9rtvv39pbz/44KMnT6627YOi6749dF2XD83u93tyVvPRyn3P5xj7vleDUAlBJ6+FfHP02BgTOCobSiikvhhvDPbed33/sN1aR9ol1Vq73d9vNpuLi4vlcuksmaqytrGO+r5fLpeu0kA9iRgAsKkGqQAnd5umgEZEq2VFlRcSaQhRtNlMjIEEjEVnrR5MiDECsrByHlRHDGSDMENhwKdxJlimW+ShseYchxR3MaWeTiRQiVLvvzJWZb4gyeCcMNNyZBmrL+Wn57OdT6Nk0BMPWR7kMZqZjFOSzUTgzaX4Y8DB1L8xj3A0ehFEIO9angbNonzvgfbJ5ZR/krElgEWIMiuOGbBptFHJx/ILY54ikixea6tAPQG6qslCIqcfD3BjWK1WzNz3XdNUl0/O67oigru7u5ub7X6/b1xzeXm1WW5ev3797t07ZxpbkzFoLBqDRAgSm9oYazOEqsp98PzFj370o08++eRP/uTzqqq0Ak27P2y390S2rp3KyO12e3Z2ttvtrKWhnhuxgLAUwgw4BqnrRdcdfODtdhu4PbTxgw+vr67O7954GDFTztpLdhOXCF9eel/5eOkjnGx0iQZzhMmDT+5MeHHe30KciIhQcaqkvJ83CKb49qjxNqfK91x8uu3B76dBGPsjJordbLZzfnI8SFlCe24i6gPWuZLz5KnmIHDJBPKKJvExSAYwjKNVGdqx6Bmj+DBhdDJWWCeLhRlHKt+aQOMU2KGEZ36mxIfHEHjyCRwrzZMLx76234skk0lOliljlouogb7pdRItR5SlTAtFlXZAJAGR6JwjjqHviYO15BBiv28ifPR88eHz58+fPV1WTthX2IeuBQ611MaZCqvekEXe1bZxfU0QhYGjCJCAaFE3jgJILIhwQoWc4fOELZTX3PEBs70YQy+Wz0yen3+IQSv781AtAkkEmRkEEAWAEIXAALAAAXAfBHHoalgICFBlS4OiNMhhAgDbDCmsg0qRSoaQHdqjS/qviMZAtNAIucoY5qH0IjlhESQk0nS+vN7H6AVm6Cd/sDX42FU8wLnBQwn890yAU4aC2jxaNNsgIBkRiTQwhOQHV5Gt5hMkPxHHGDW7cZAmcIzSJOtiUNaPXiFwevAsZ+cqSCGxaM3CzWmWiIKgTd6MtccSFWXGhIgwq2uSYDhiN1g4Wa9gZlcZY2z+Smm4yrhjQTZfY3ElJpACAwQQQVOQdA5JlzGKcMwcQp8jHDi8M/YhygA9zbAFAGOiMcaaylZu8Ggj6npJSOjIiGRobVfgm0xxDxGtrQCCWv7e+6ZpFIYJaY/dI468TihlrlKMpYAb+UowaV/ppgCAtvWao1zmw1klGCT+zCOZkT+H/ko0HtIgH5W2x7j3cUBkEWBEEmEJfQTLpm3bphJnDDgXuD/c7x/ubrfb/aZa9l1AxKpqKmuFUZiXy1UU431EY9dn5w/bbrvv37273e7a5XmjlltVNcwwVJrlIfTXd9MDZZq3nHAMMpXFGJ1zzrqMDBrlszikkSczfojAV5VVx4RaYsy83+8BoG13i8Xi4vz87OxsvV6u1+vVamFs1TS1rasy31DPUkHUHG/dGgWaujijoqvI0OEsaQsRmUkAkFFM3lDvvR5qMUSIlMPNtmRkE36X/5kpOSumpTqYdZEMxMmWlwh0Um8or/LYGxQqXb5TzmeOyplISo94flh3KCPihAWXlJPJgIo6K+VK33OVGkn+1sS1X3IZGhcFVvY3lEQbK3kiAnCCtBJApvdlZoXmic099+VsJ9oDFqllkE5nPbYRxYCjOShUJTIA6eGLuq7ZBwJsmmrRNABaPTkiekqtipDt+fmTs7O1s4goIfZ3d7cPD3cAcL7ZXJydgZD3frc7hBDOz57s93trDUvYrDbPXzyLsdvtH87qsxD8YllfX11+/vnnX3311WeffXZ5eVlV1bu33zBz37fGOFs5dTsZY6xlA
IhImvcuydXaQQeROQIROVMhSYwxBC8Q+z4gUR97dSdtzprLq7NfvnqtcItxEKWEdjjjekzYyBs0TRPCQv5NMKdACXjsd0ZmGJMSFMhcbnf5Y8DGYk/fs9cwRsJyniUtlJ8uH5tf5dng4nX5Q2zC8pX3U+sE2qAKbNJIylWMJFZxHT0XiXjnPBBm+4WFa7ycZzYUJzQIM/rNUjbfz5rcHBpSFNGZfDoz9jzt7MWfT6bkruX4J5GznPCEx4JMK5NlgE125Pcy2/k1Z+yTv2YgnET+DJPj2iFbGwQgKFEArDF9eyCOjaWqromjxB4i/+zLzUcffvD08mK9XETf9Ye+MQ4WhsQYY4RMNBgr5yz1h3Xo2u/vOIIEAESwZAOYntnHCGQBCRBQSCQikKraEyR8DN/mSy6h+v4HTj7MRx/xEKgZNCdihgiAoFFBQmAUgBgjod7ECKxGBQBYskgEiFzKRwA9WT2kWwBIik763sOIfIaJUTG9ch+NtZ0PIpFIpTYSGetcjJH4WEpEtZgoQWu4ngTge/AHHjcmM1mluxodnQ/Fkltvn3qx3OIEeSQiPXCVrKzhiKaq9dneQDTOOSYQEWP0bOEQKDNgj8ifDhxmFMr/lOSdOew7rZKfFWUqetuo1qv5olJ4CfOYOa578hwyAHD0GZfKQIIhcMZaMlEABQgQBSQymGNf5cyoy9ig6u75uJru9RCfxKjaucYJNY1IRJDUDRdjNLa2ebE6dx1fCxcJH+tg5V1DjRmmCg4IQ4BxOP3IjKQBdsPMqr4DDIHBCdcFIMRjgYZkyWc5SIhQeuRlsAahBPg8NgDDuZ7scWMiGOyHmbzI6FeiekZOmzLaqDg9Ya0darACQHKeKtxMOiSfRYzOUAqpB4VUQoiICIYRQSAyI1BgAGOOlljXd8Lx+vr6iy8+e/v9t7vd7rDbt23fU0BEa52zFQTufUAw52cXu0O/b988bLfm0IK7WCwWy+VysbKYMp811t0F33ch5+IqCi0Wiwxt7XSi2zEcsjVVxnbtpNK4Y9cNItKs3exrGJAWWa39IUeu625vb/u+77qV913fr6q99b7XbNuhpkb0zAjJPkJkQ0NB1JzCTXSsiAMARAYRa+vQVaL1bCMzBxGklH3NIAykCf0iwgjWpOqrWJ4BYyGkoaOAgAgIILNwFFNNU0cU53IZVig4KRFxoUlAIcAeE0IlIsK4SkRGyvyVCQPNSiSm5AoprqToTNMvJzy3vEZENdNuJzit18R8TbDFcrbl58rv5kyt8iotNyJCBCySteCU3Hr/HX23rLIIj4ufDJ/SKs48PUdTmVnG2slcMc3wEREJMcZYWVdVFbCoAZzPnWtpGaWxzz/6wdu3b9++fhNit9s9IMnz509/+MMfWoKbm5s3b97GIHXdhBC7rgueP3jxjJn3++3Z+ebTTz8U8Hd3t+tN85Mf/+izzz7VIjFd122323c3LxFxtahFhBliUAgg++j7oNA25LYPu6qyCKauqes6rEOUKCACjtGbVKWOmdVTE6IPQZBwuay+/OGnf/V3b/UkfQjHc4PZy5BRKKHHCZx8bI/m98u/ljdL9JvYhOWUSlLKCBZTY/HyT6UaMSFYken0ShdSXux8FZMrf1RG9RKlfOk9PKT874SQT5Lt8bt0GnpDQlQx89LFM+czc0jqOJzONmfHfGlNzWGSyT9fMu70M1lyplkYG1RJN0rVw9M+TlxRWZ/I/yyXwEUKQAmcjF0ThgxjpC0WeFxvyplP+sTv8xievEpmmz/3GEjLyU/emv9TEAYpC8eGgaCQlGhQnCXxfe/D5fnyww8//Omn9Xq5sAYq6QJ3BH5pDZGprQss3sdOQgxsIa6XTXyyeW6g7fzDfr9voefAGIyxZF1kQAEEYBbE4beIaMPV99POZI2TfZnwh5LbzCGTrxi1/5WacsAgLCwiFoEIGASQGCIIikRmcc4hiXaBQ9ESXBEAIh/dB4NHTI7yZS6njDUn97HEljLyy6z2pAgQIgGwAAa1lwSZBTASkSY+iAyprfNVz8Hye/jGKd3g+DoO2Ybj1zlLeygwdjKCiAAMdJ1rhKhLEYrU+rTFKpSHUACkXMTMc/b7fUYJbcqXP+29F8mpbpJkMYYIMRUXMcZo2b/sIBYR7cuHQ1oEEwGzYSY9xkZEAKjqbCk7RFCEOB7lUclgjRkyM0sGC8kZfdSth+ORISvxvZaQSQZhjFljJDJoZICGFrZh5hA9AFSVNaYREYs227fZltb8VY0QlqxbRIiGDFVmTttsEVGLMpohCGOMMWrOaEkhVjM3pU0mw8/kRemJmKqqgBAQhYWVFSkqAmjqZ1ZmBQBwIKjMoOSIXqbMMcGirN38iJNeWRctxdnEkwhJ4pR6PhTWXX5Ambz+UxGsdDFgSi1mZkNGSBAJMKaztcTIIuJDF2VARWvg/Pz8ww8//Pj5NQB0XXd3d7d72O73+/v7h7vbO1c3cbd9/faVGLtcLs8uzu/vtl0Ir169Wi6XZ2dnq816uVyqvTdkCwfOUIoxtm2bD1KmSZrs1xii32gluWKHyrfJ8zXgc65GndLvERFgSMAOIZBw5MDM6rPw3j88POBORDhGXzVN7fSjenYUbDphq/E9jjoxQDRgCABiyPqGiDAiI5EhK5EjeOWyivAxSgw+clQ3PhENjU0yUzi6FooGBqUSwMwWp3WaJSmaWdso38JCW9JvlQ6M+ZU1laxAUCqeWz5W8taMmlJYUzQ+z5MZSsmpR4I/zRBmymh+t3wSCtZfMvHJrMq3Ti42VzMr1xVS29N8FR8aMh7y83PhXYL9f69+VQ44Wc4cGiJSZghO1A7NC4ICUMyMYAgpRun73hlrbSUxGLRtvzfGNK6y1lYLPWciVVX97d/+7Xa7vbp6cna+6bqDD63aV998/VulWEMuhNB1PRFdXJy9e/dmuVw+e3791Vc/+Cd/8tPr68vlqjrbLM/PNyyh79uXr24AoK7rs83KWnv/0BtjLaJWkSKyiNpXDLwPf/8P/9AddjHGn/70j2NEZkD0pDxBBED0QIX6AitbaWGzGIVMZY384EefVtVfSypfRkTqeJEExATJBE9mwhNnzDIplVtzco/mmzj5UXLq8WaNqhGOtng8DoypD8aEkB0uE8wscTiPgFMN6XjRODyV+MY0U+Xku5MpTSCQ3y2puJx/yUbyperFxD9Vwm0CpTkAS9DN2RHNwnFUONcnk4RHBHl+uNzckmZlbEtMOBs8glf5NxYmIpzCt8ng+c58pzKQ8zKHV2boMOfV86Emz2eNZzYfIXM0cUuN5PGhcP5lQpQYalc5FIiRgJ88Wf/g808///STS/PKGJDYVVhZIz1FYm+FAJiQMAbuWx9EGGpnL87Of3hxfuja27vdm9v7dw/7g2cRRELPgDi03QTW+NmJMNMEhx+7cGYTntz3k9s0gaQqncfnkZE0XieIIhAFEGjI+xqqjAMLRC1poZX9Bl6UsyQQQTROOHEcQOj9Y/sCYwzUH70XY6y1BnGQkzGy+E7lLGI2HtP8ZfR6ufASaCfh/H6QTgGLGmGeEN0JU/Cx
AoiNEUAfwuxKJpNxafla730UepDV+tyo+fXd/2YR/47an3ntnQLrBrpT52RVkGgrqnsjJl6SqUsrL49CrIx2DN1/fvHtr2JNJ18PLtQ4iyu7m6sq4MEk/t4e5+z0XgSLaAyDGKNYaIARFTXdQcFECp1vlydQmFNSlIBaPCWmOoO532+4OxBTN3IVZVFfxwCq+kwJWz1hbOGGPElt57RNByWUoOMgiImmpltcOyiLAEjjJKUKDNP5Y4LKOVcIk2M974ng19DPFkKgAsz69ER0n+ybliURvriDlaMquyCm0TQjj1vgncdOHh1N+18fWh+f397s2xOQUAIIu4LutPXjz5+3/8gx9/9uFNbUtkS+J96Pu+9aHt47Hl+2N3t2uD56vt+mZ7pdqRELVN1/lQrlbWuRiHapkAFC0B89Ax+XbOAAEAAElEQVR4CYBZNPKNmeM5uea83hDYx9A2fUaDWimdrDG6R4iWmdu2u7u/vb+/75u2LMsIcjqdHh4ejsdjEhcxqxuPmekwKS0JpKKZpqNYaMdgxd1hf2obvLvfbreHw+H6+nq73WovexFR1186UFKan7JX9QqGEBA7RHTOlWUpoul5fddRKtYCo3SXUEW5SoqiTwEUQ36gPyuBkl0wNfCljKeicGf2YgiIGIBj1J4ZA6CYU7sImB5VKXdR42NH5yekiZVFnZ/j6bhBRHUC50sTEaTkQZ2c5prYptp4jvaDgS2z0GibhRgiIKIGYCEO/DgjtbGWj4wuW14CKierpCZksWMmj6wZFNHAIAiGDIIIMvcxRgQyBkAYmAFoEOcY1OtprTVFaa1FQw/7w25/qtYbtzsVVV1VlTUW0VhrVSGM7JJWbK015LQcqyrARKR504ltahRu2jLS0tULuyqMPtiu61TpAkNGtNY3apCtUq4xxplkbuhVvXfOaN2XyB4RDCBLmFg/AXEIHhv60AKfLQIxijEMZIkMocWhxYk2U7GjQqfJsaz9F9rOF06lBYNAHKUNXYxic6pL+8djmDKPBXbykz5HAhzNvSbri8Vj0q0WlUn7zZnvOFHUBbchn/nLjPVfwrOJWR0yEWe2qJxtLUdL3AGmhwdloZswP0ov6IQTorkkWs3upFDb9BbMdPI0VXjkyhlWvsDxJyIyFNVIg2SwmouY6d/8jYkwZiPk889xRu/nVcgG6lezkERmjgPw5ewEQHSuhBjI6klMrrRN179+8y5GBDHMQQQNChEUVamd4oMPEqNIFAKyoPJIZGYWiBKHWjMESOxZ0AA6IitAACQCgeW3X7/95puXP/vprz7/4svDvkdTsDjrnpItuz4ET7awtqyx4BCCZ3HWsMQYPXBPKMaRJaIhrJ5jjAzeWC6NrWrjnEGA42n/6tv94bDTBlOIWFXVaH2UGONo5TU43WnMdP7HMCHH/OzfVLj/3BIYAERVwqkpAefGjrkMNOPvkNW8muHtRQzRH19cCOKQ7ZmvIse6nOjGm4MpYYaueZW2fBydVE4mZ8Be0nou0NpU+RQRwPPcUuyNyNw/n7ORyZIz8KaXpsNVRnlU+ST3HU5Vtdl25O+Ssd9gHBrgDlI+Ec1C/WGMSnWuSJw88fnZds8mBhm3TLOaYU4aZKlg52CZYUXamgQxyqrX5NA7AwHnNqns2zn5vOdaYuB4UxQDEBg17gCBAcnZyNhJs+v9rrMHH1sGz7AtYF3Yj+3VMfD+1DTH5sRYuOrdsQmGqHC2i9zyjcANmdK5aoU+xm2J33vx1BgElO7Vq0MDpoLdHpp2dzj2V1VVAcXmtLvvTiVEQUsmskRgoxQ0Ttikz3TBd5ozdhEZgUl5sfnVqrIEp8Phm2++iTGGKDFKtV4BQAwCyHVRqoXLkjEWI1hmVg9Se2qiDwaJDGlwn7FYFna9rrWSQQjh7v7A2u9cRAQGIxiee9uMkzSI+ufl7VuaHvSK06qS6XNefzIfMFdd8q+WRKqfO0Mtci8MaETwdOyD7/ZtaKi8Df7VqX29a7/dHe8ObSdEri5jfLbZfPbi+sefPP/+h09varsidtb0fe99aNr+1Pp9Hw9d3DXh/uQ3BNfb9fZqXRSFMa4T6XzofLyqqrpeS+A+anIBowAPJCYDzx/q/ZCgOOeYIbIXEQCSsQgnjV6gGCOijGIxlmXJHI5HidE3TbM/PLRtC5GbpkFr2rbVwwuyRueSXen4yHPYcstRomJxxtFQH67vezTuYb8/te27+/vr62vtZe+cY61vobI4AMjQv1xZnDaB0OzBvm/j2CNE9SttWRHCoHsktTAn8Kwv8VBUZujdB5QaS+RoUFUVjaGV+TjH9sRDUxVISYAypPMZk9W2JaKiKJhDjnIpHTF5LPMrhKBxkbpNqf84M6v/EMfQvDg21jP2fKzrTzRgVZej+ryqkXoVRZl+q6Heyaaj1AfnkwuJqOs6MpBAnUjDGKOkSlmxCciOp6TeiAizOjaGcqk49syIMRI6MAzGAoAxQSmXBKytg+9Cp1oixAAcMQZwhUMkZq7K1fWTpw/73Zvbd/umLcu6rterzXa1Wq3X1bqqiWyMse1Qg7PGPoLDeac5lgAgEEdEk4TGyUGahIn84EvoHWPUnF5rrQyuRDXZiDEoItpuHpyRyF3X9H1fGbSWqqpCFDLQdcAcQeJjwsm56i8aFfOYgTmICBAgMkhUM5BmDqNWNFUXrbWWBqwrywoAfIghMApYa4uiVDSzaQvzC7JCrjL6mmHq8DFZry0cpU/Iakz1fT86As68eJYve8Gknck9KqkMLt2MuczABONPkkyWe/bStqW5SBaZqXSuB0bSTtMVsxoGGSM4K7GUOUiThJRkLOUIeaJR/mqcdp6ZntYTp2vaEZwK6Gka6X6CEgAgSVmWwQ+1g03WSXaUdR4V5TlzCcrQQGYCcP1XBc3ZzXwQzrpcdl0nHBARpyFtIkBETdNorHPb9kVRlOXmcDh89dtXD7vj0ydX0R+DZ+Mwek8Ofd8zsyHnamLmvm8jC5LtggkhiCBZZ6wNEaOXKFhW2xDR2YqBDof27u7+V7/64m/+5m++um2bU9v3QcCCbEKHgpZs2UcGdGQkRAmxj6AWjVCVbAAsRhEQCAYQJaCQxIEKNqt6s60RY3va392/++rrr4gGi4lzjkgNe71ojVRrREQtIAnlcxaTMBnGNLxcws4xIUNOkGlO6RmlkQ2dDRD6okHxmBT7OdNIfqRJJvfHsWplQqEl60zjq988cQlFGxEBM8HbfM45IWQD0qA2T91l+TIT9NLIPFbPSwipwByq0U6Rdsn9eFqTABGNTT1vBODcNy+xgpkwkbOUi7m1lPnZ0jR4WhQhp8f8hzjlh7lpPD2Ao0U85zxptolNpdoMNGaz8BhBlKCqZdlzppd2P+1XrijmLDHtZppJAnW6aa05b9xUmRQR7aOdEFhEBC8rnCKCY1J+PhORSyftJcV1gDOR0fLforUxkQkZkCODIYCi883ru1Md+zWCQ7cuY12VWyMfX60ertbN/eG2iw9dV9erlv1Xh1MT46nbfHazAjHsO4J31pVIRYHxqjCfPrkyxjxr2998+3pV2
k7i7aG/byLGAEGcRR8ikvEhCJmiMCLCICKB0AJAhDg6TBFQINOfp0R5Nmt2XWssquhQluW6XjHzN998c//uwRam62NRlHd3dwCgcfgH2A2opfZE6xJNKcDTxhVFEdn/4Aff+/M//1/HGF1hd7vdL37xm/v7e2PM7e1d1w1l05um1YDAKXoPZrJ0J+1sTimzrdfcp+V10fCa/zl7SzKgpF8xcwjhXeG/3u2K9ebrN2/3D+WmqhuPD7188erNXde9fLf/9n7feCGqgKlr/Ycb82c/+OhHn7z4aFt8uHFXjjEGjJGZ7/aH+0Pbijl04cuX376+fScCL547Zq7ruq5WaE3zcLy9e1c/eVbUq8BgnHUkzEPRF4nMMJE9OAIAgyHvo1K9tWeLT1EUEgOO7QGYWWPGIkjf99YOsYghhBgkxighImIMQcOAE50mb7CI5B4nGaPTaXS7DbxLhMaDLIQQICSYOyJ1Wx2Px8PhsN/vnz9/fn19rcpMVVUqqWv9Q8VSHFPa1M6l53IYe+tZa/VXbdueTifd6pwj6QzPfs4xl08bGEiIRAggReESlPRc067fKXPPe980TdXX9/f3qeynSvyqXznni6Jwrhxkp8ix9z13MmprRBQ5CBARHY57OAty7P1YJqca+nCkYRWZT6cTTkVEAKjrmsUnI2DCaj10VH/Wlerxba0NQU0DWBSFIgOMUanMHKOfxQmuqnUcUw2T7jCC6Byjm6Adozh3rmeTAo3T5FWxPIvZxvR9XzsniMdwLFxVFtg1jQp7AXqO4EzlXMkhGAPGQNu2DggIBaEq6/V6ezydWt83nd8fT0+ePHOu3K62Q74l0mq1ijG2ba9AAAA9r8cdOTuHk5qamEDyjKVuKIho6VxQs7DFuFJcrVarVeW996EjGVI3DUrf933bxegJsCzdbrc7Hvfb7bbvuxhDXdc06hdhDBh2plC5QmlKSVtLmCKSbmhgZo6aEYxjoSPnkksAEFGQ0Ii1RufsXMnMKnJqeqdN5+VM2pjleOBELJvYzNJXSZ6LWeVfWegYM4kEMvWJiKZiD0Am88E8VR5gKgQvv11emClU+XJyD8ns4MnHz0+O9DkxRDVowvSsmkEMFicZTi+YmuTzt8uYQwXzc3EiJ2GKqxHp+z6GvOnT2e4+g9ts2NmHmR6SPj9mqZWxoHM6J0b0iAAmSSjpvdZqvxeQIAwSo3TeM8Pu4Xg69jdX2HWelImIhBCsJQ0xYDZooKiKwD5I33ONxhlyDLb1HJmcq+rqqvO8P7a3ty9/9cVv/+N/+Nmrb29DYO99R9fMNSAikGcM2qYwcNu2ZV250jHHU3fyoUNEa8m3D1VVVOtShLs2qMePoLDWXV9dbVarEPr7u3fvbt+cmn2MXiCqOZyZEYfU8HE7AEDb4JyJS4tk4ELWgYwQZKq2LZ/MAftdLsl0noSHs32f3UzELpn6BJdywyTb6xmfec81c1XxkDZ2IQ724nKyd52fzH9ljIHMIJvIn7KIgPyHkuk2OjYRGSKIsnxpvkEX15UvLbe45/Mc8GEsj36RHY14NbEZzdBDZkaB8cKphJ22Xr+NY8NPmPL2XFd/DOyTwbMqzTlUl5uC2Ykw29/E0MaAnTMDH7x3iwnMVnpxL77LhZIPjoLarkrAWO2awECd8K4Jr/ensjisKnONSFQ8LfAHN5tmd8D746kNLbTBFkTuXURz6p1xDhxE2+PdZr3errZ16V6YjbPWIILgQ71513mOEJxFIEYDBSMiMCY4CsDZkIysu/UYdV0COMBAuYNVSIs9jCFSGi416OfWDIIOi+7B2O+oT5iJGvmvj5Vl0TRtvSr+7M/+7J/8k3/CEo3Bruuub/7nr7766rA/hcAPDw/eB2tsXYNzxezUIyJECeGyog5Thyc8wgmXP5nBJD/Xcsx8TC76fcPm/p0p6w/qq77k41Huj/uX7+6/3p2+frh/e2hVuGbPJbqb7ebP/94HP/rexx8/3VwX8qQyBXHTdicf39wf7jt56OLt/vT1t2/fPuy9YF3XZUla3YGcFcTO9z7GjXVFWUdjGEiEAQU1hXA6WwDU/7OwtYVSB5E5n9GEHLyIRLajGs8igqAH9JlFIyKhZW0wMLZbiGMHPBmNy7pNOQxn50KCp2RUnG+BtkZIn3e7nXYv3G63RVGsVqu6rrVlfFVVxpjet4Q2+eKGOFIRPYgHL+hoz1qv11oSKZ9SEkvSTWNMURR1XccYQ9d3Xde2bUrvT2VXtAQrIqraqTmH+9OxKIqmaRQ4ltA5p3qzuvM49KJ1IIGj90XhBjE4RM8+Zs3lZWpNI9JQXlJ6TPpAAlrSDfQDERVFIUB55HbaBREpiqIoChx14FG35GTQyUr28OilOHsy9FccxFgsy3IWSKkxgmZR0/WMADKXUc1YbSTGKBI1gNkHtqZwtgCA6AbgsAuh99p+kgANIQk5Wzpber8HAC2sgkBCaKwtiirKUKlV1XXf+qdPn65WK2PQuQIAjDF9H0SEI4iIthspSnXDYozovWcOCvAc8+VSGELKqvPR49CD3VhrwYCIAAdjMETPIXZd1xxPfd9WRVlvNgAsEo/HY4i9iDDHqirqskREtc/WdW2MkQgAsFqtuq5pmkbrl+qGdl13PB7rusax/Q+iRt4hAAiqHRNIhqIyig9uszFoUQMKRNQ1eFYI0yYlik27OOOks3N69q+kqjhJNhqfnJ3WudygP6GzT3kiDcjUVHxRMPo7XWeWN0qxssidS7Od0RVc0t/y5y8Gos2ACRnQOPO4fhfBJU04gXQ55nk0ZA2/nFnHx/m870W5APHYV++5JFP746JDffY5LUdjhDBCEJHAMYTAAK9fv727e/j4o+e+j3VtAYIxTiRqwosAaG/NIIElikQwddf2zLFe1avNpuvx7n7/8Ptv/uY//uK3X3399e/f7A9N37GQsaaMkU6RrC10kj5wjCACEcJmswocuvYYo0fkulSrG1TRsvTHwwGQrZH1WuNabAxwOr57++rrh4eHw+HA4ktniqII2qU+C8CYQUnVQgRDBIjE/D7YylRzg0uEiZmIvNypJb0sD2nI8HnUxM7ImWOsjBdMS0/NLwTIiB0uMZb8z/yATOOLiMY4Lal1NkLOalCxakGwiJi6R0DWVWLGeZazAgCWwZxJmYICGZkvae3iPGHKB2DBBGZryUd7jO/lw+qVCxyzhxGHEhSSGQJmr15eac5LhsZZMwm9nzzJ+ckyW28+YL5BOUOTTCHkzM+pCuFsp5YY8l1Y1uXFRiThoRMSgBACCSJahMAAIoiWpX/o4Ntdi/awXVEA2hbVqlh9+uQqdL0I7tvuZcfioCfLIcKhcwFNxH6NZTwEZ4vV+tq5zaq6Wm+dLdE+tJ73b25FGND1goGELIYQtgSjGqA15s4HDiKeW3AgApIAGEwG+wtGIhwOweEoVHkABxcx9AGK0sQYQc4HpV4Eg93HDFSTvtXx+erqSuSuKByi+NA3zbFpjk3T/Pa3v/3yyy9jjG170jAza6wxRk94zAwHADCO9v+H6w/KCTmypZksz/1dhG8eDiBvjiv/
UK4d0t3+8O3Du9/f390H3wmQI+h8yfjhs+c/+vR7P/64+uCqunZYQ6TQBYlN09w1/cu7h/sObg/dq/v9qzd3h6OvKlPW9WpNV1ebqqoYsA/x2DY+sqtKKl0QjJEZBIGUfSlFhDBEHmo3Eo7ACNYyogVERNGMBkRUL4qIWLEsg/QvEoFT5F4QyRdOiCx8prhcZJIx/Wf2p2RXTtoXrRUCgtoKVeMeu7btO9rT/nhwzq1WK9UJq6par9d1XRsCxGDFMmul/nM0AaIwDyFdWmnGGOPcaGkdo9KMMWVZJqzAMZll0PrMEGaZFMJUATWPgFMHWlEU2+3VcXPc7Xb39/d936nTNcZQFM5ag2N0GI9MvokhYTiP3d5hlOvU85kmY61FElcYGGJc0VgLACFG66y1BsZo0hAoxuicda7SXcDRt5lYYkJm1Zk1SxPxvF8heK0kjKM7MYQ+zS2OLUBwLJGas2tNRk2qqaSgQkEZ+kJL3pciXQMGjilmwQdLBuBcqwaQLZmeGwImQARhlhD64D0iOlcimrqqS1caJBFkZkbgyMZYY0AEmsPxTZC+7dbrdbm6rqpQ13Vd10URvY/ee4qAQQRi10XmoLgEmadUt2mI1x0DdBWvaKz7oFixWtWISGbgF2RAywhVVdE2p+7UNMAtIocoTowxXXdMh+BqVfd9fzwe9g8PdV2rTKIYaMkpqmu0fgjBx6EnSl3XRBQCE8AA4aHYShRB7zXlBIb5mcKMObRERGgBiASGAFoydsGCZYlAOYs0077GS2aaTnpFyjjtNJXO6YSsuUwmovLJ5Nz6g6x89oy+7rscAEs7yvKZ5StwceXfyiXxGqchVfmYCVz5HCQLn0t6cgKpXNIGL84ZNCIXzs15pqt4dO3vkZ9mB2eMy1oGkK9oJpkRabmtoTwoKPYOgf5D3WIEYuYo4Gxxe3t7e3sL8GMlOYTonIsxsABH6Pq+6zyj2KIkSzF6tE+qDYlgc2p//dXLX/3q1z/7+edff/2m77npQgwGyRGVYFwQYmAEB2KC56bzMXrriAyhRJZOxCPEsgRrjSCH2HadryyAsDVSlMY5G9m3x/3p2JxO/fHQnA6NCJRlWVUVknShwyIJTKRns7Uybodk8Nfj80LiqCxKoVzEsfxbgIlgfR7tkV/x2OYkPZy2b4YeOZ7nD0jmqsLRapimjTR42Oba7HSxszcukTwJGTnLShOeTW/49xEyUVfzbOTc8HQeLcsYnO0FM9MixynJH8uXJqEoH2T53nwtM3KbvSVnQed1AUDW5yOtbsmjEBHpXIgvsRfOmi7mP8+tAzME083NZzLD2BmL5izE9yKmpXlOZ3vG0gwTLhxViJju4VTV/DtdxIhAACDAbISRAQABJEYIAQCssYKxi/Gu83TqPzwVYDoJWFCxKu2HN5suxqPA6fXdDqBjYTAtw5tjYwGDmLWJtpeVF4femWJVr57fYCfw5tgUb9/ppkQWQAKyqfi4CACN1o4M1AQwr1O80LJm4MUsvULdIMNQaBAjAIQQAE2MUYYw0UllCwAxxo7x2OdN7Pu+rmtj8de//vW//Jf/0vum7/vDcffll7/+6qvfF0VBaK0tmH3PLUcwxs1o8LvsV44Mf+etXbxlhrFL0IkIF7DrI97um314aw6G6dS1u77ZdUGscVaAuS7o0+snf++zT77/yUcfb8OmdpVhE0PwIgA9YBtx38vX7w7f3O7uj23b9WChKO2qsnVdrtdrIel7f2jC4XQEonK9YkEfQ6elfdR6eJ7qENWuDceQGAalLqI6AYZYCAJAk+SK0dERIwOCG0J/5xYlHPUWGUN+dK/7vsfRNpcLcjMyz4G5hC08wtkA4Hg8ElHbtsfjUZMGVTO8vtqowmZt6HuT85CUrQBZB2zFq+U0Uvu+9HMV+knAGKM1PyXLw89zxWWMH44xAqB2p9ChQggoEH2oq4I5RmZAQBm6RxBR07FkwVMpMMQP6s3QRiIxWE2VTBSq1FqW5dAxcswtVHVXc1ISeUom8mlUYZ75SUOq1Lk2RAg+ZtV0vPdd12jCYVp133ptU6lPJpTITQP58yGcT0wR1IIpOOb0jgfNAOp0KITe974NIRABc/C+X1UFc+zaYwi9RB98JxINUuiCEBpjy7Ks6zUbw4JlDA/3e2sVmMQ+hN6/e/fu/v6+Xu3rur6+vrm6uiqL2jhbGK1PE5mD974LkdkPZrLM5J0gSQAiopmZw4VaGMYNOa/WmMF7KsaYqiqKogDk0LbHECSyM8RlWZdlXZav3h2rqqrqom1bESaCqqosUdu2RKRe6KZpNqutMSaE4H2jhOCg8L7TxoMp0BfRAAxOPxkq7RORpk/ZZCUxSMysFWhA+TZgBEFhO7Mcp88zvnw+kqeiW3p+aZHVizOFMJd7MIs2zjHpUljoBX5xnnBG0stv3zNIumbiSJptLkYsvp0b1C+OM5tS/hVkMFxyxnzY2Ycc8tnGTbzz5w/I1lrCc8JPmhIRzeyvs1k9BrrvePSmNyrPytlQGmd4Fw5/EqBIBCAwEgOHEIqiaJrd7Zu33ndFUSi0hFGYus6X1aqoVkK+9X1gdRSUh719+fLlL375+eeff/Hq5etD08cgAiZGLOsbV5quC11gZGH1WzoKHL3vfGgQAdGSiZF7YXFG05VD3/UA4JypV86yIAqA8b65f7g9HvdN04TAwYsBV63KFFjvYx+jt7FCBAQEAkSDyAAWkWKMYyUrEdE2FZfbggNcOKRnu3YR/o89s6T3i29MJyJMKR0y0shPndxvkxM+IgpLsoDmmkBciPJpPjkeZtO7LD3M1jWnwUc0W1wEcOZzy19hsiQ3ItIcwhhjiBHjZIYTNfgS2JPHLH2booaWNP6eBc4Uttkzs1fMtg9mOuF0nHScX2SqNDYpfs8uYKbJp/HzRc04YT6f/I0yqqYZXp0/D+e0TM6UJcxnGPV3vYgVzswIQALAqovF4E0QFRsAbUDZM3DX354cWSSIGx+cwfXKfgTXLbldH2XXR89UFAbo1PrXp0acuS6kaqSuouHOoEVjwURXWpAgEFGGAixRwIhEFo1QRpKkCuLibJ1BIAf1dK/PWJGgZK1t26GVKxHFKH2QqjK9WsSHGoQIY+cV770xQxcvkSEdGhF731ZVgSyvXr36/PPPnUMk6bqurmsiUPUS0YfgEQnBLFEix4fHruX5+3e9LtIXTOlI7wze7wit5yhN28R30GIQZo4E1lbBt+TleuP+6KOPfvzpJ58+e3Zd26eblUEw3IXAp+7Ys+w6fnMMr4/hm7vTN3cHz4IIJUFZ2s2mKMvSFdaHcGq6+93p2HXl1U1Zr1rft4FbH0TQEXgAB0ST3GNQ0sQBXzU3GNLpr8RibDHSYMqRYe2VwGOgoEqRxhgUCaFPzd8Te0yMIgcXj839EtuHC4LK0mY3KQ+Wbwozt22bwiNVLXy4r+q61h4VWiVSA0pxNEvpOSMydGJ0TiST5mUs/iFDSVLFZzoXg+l9UnoTG1RHTfq5iKTWF+/e3d3d3XnvS1c4Y7VGmrEIkUPfImJRWGZpmgaGBNfJOCl4ShV
CvamCu+Ke930I2sFYCxB451xZFoioHrmicGVZ6tmkAYS5MxMy0uaxQmRaHQAUtkguh5AljXddR66wBnX307YWNiKdeTVlUbv6Qh0h28fF+S4IdO4dYK3VdE5m7rqO2ArEGIIEj8ghRN81IfiH5lAVjmNHIghinUCEGL2Gd1VFWRelc66IwTkHZJ88sQpaZIHSAYvW/Lu7u3t4eNjtdk+fPru5uamrdVmWZem894gFM6/WVYxR0ylDCIKUEjhFxBhjiRBRsW6IKB5tZEQUus4ZrMpK40WJyFqyjvb3h7u7u7dvvuUQi8Ku6/rq6mq73vyX/8P/IBC//vrr//Af/uaLL764u7vt+94Sxai5oIU6ckMIp9Op7/ur66rrm6Y9IlFZlrr13vsYJcV5KEdFRAAyNZKcK4MmYwEHRAQyoJnhIoJIjHAuzZefK/nJiotrhmdLVp4wfmn5Tr8yYzEVHG0wg7EEcebHSMf/7BXp++Xb33OlcWYMazZ+Oj7z+0uB4+L4y2fyJcwezkW3fG55ThFMOKbMuKdeMxEt/SqEQGMhvuUhNxsnn+RsfDvtf5pGW+aMLUeegHpgEKkX1aAQ4hjVJsyIoCXIqqoS4Fff/v7+/t2L51e+byyx97HvgytWh73vQ1+stlfbD05t86svvvj1l7/5//6bN4fDYbfbhcBFURTVNkbpuwAgbcv748H38ebm6Xa7bftwOp04dgIxsicSa8QYjxgResRIAAToLKzKIhHS6dC2bXM8PZyaXd83LAEAEExRlcAkDH3s2TOSEJEpDA/9eyOAAcs4JSsAAGDIuywsqtTm0MsRSe/kZs7sd5eNr/mVE7JMgzPzN5pFMSG9lioHj0VQZsPqFisAE+akVyBMCG2JPNO3X1CBctqZzRMRL5brysk5X2z6drrSocmbLJLxYowWCBf8iqZ5NcvpJQtuGlYFjvzJYZenM8/neZEPwIKJQWadyXcZYMLfZqSdasksYXIRr3i8INMJRcQsCtKOUKXEiNIciEiLKs2WnCCmdYBm0INp2vYMsWcg/Y7HRLqsWAFh5IHzEhBEEDaRhjoGkQUxAgWGrvGv7zpDlQFcF72xYgm26+JDhh88fdL5h75r2Asa7BF2oefm2FPpSl8UvREw2EThxvvdsbnb3/nYCQjG3ggRAzEXMQZCRKQIOMSEPqo4iYgg0OImnovunuX7hNsxSuBYV6s38o7I+hARAY0h5qG5qxK7kDAoo+ahufNgmYZRsez73jAUBXnvmYUlqNYxCJcmji17EAE1wOm778tsu0XmptvHnp+Rz+z+ko1QFqEjItIDA0SkE2ArGvBoLIvhUDI+3VY/+d6nf/Lpiw+uN0825mpjnbjg2+BD23SHQ3sK8V0Xv3lov/z2/s2xOzEyiBMxFq7W7tn1uq4tEYWub5rmYb9n5tXmyljbhtCH6GMcOoyJkLFIRsPtVMofCYpoLPpFRMag9h5TfOCYpfOcKQjUEZSXJo4xwqjjJb1Ft0+yYlcXueiMhcrUSAQAQ0FUABwtCDCaC5OzSzJfEwBoa8TD/qGu69PppG05iqKoq3VRFJphONqgbdrKpAriWBcjASpNEkffOBF56nK+hFmqm65dS+B0Xdc0Tdd1+/vD4eEIyFVROlcwR5SIAG17AuCqLOuqCCG0Tez6LoSOyuscGjm4mFlVLx5rMuFob9UPSWBO9VcBQOej7kq9kuEyx399ABHTsHqnKiqFcGq2oWvs+16DFbUzZBgb0IMMXWrSTslYIUL3Kgf4yOMVmOcDAuK5boiIAJwTx5CHXFBEsQS+60LsXIFPyvW6rh528fCwC76zxMaZ4COBnsCo9nQRBBYCqKpKk14dmcKWFsl7H3qPbdu27XF/kMi+67fb7fX1td1uy0J9ZciM4ISIBCIzd2EQseJYms5pC1ZjzFhqleB8qq4Ka601zgKA992pbfu+DbE/HA4Pd3en5lS6Yl3X2+12VdVEYB199NEnf/7nf/7f//f/p4eHh9///qtf/OIXX/7681evXt3e3t7f73Qv+tYbY66urkI4aQlcgSESTfM5iRits6awttC0YUKLiNF4jSM150IVACAALEgAgiQECGC0TYVNG5NLcji1AefkzVNHU86Rc1abpAoy56qAM36xNHLrXZ6y9EzAmoi548rOOXX4HcyEkilFOUPMqXQ5Ts478jXmkBERwEflsyQNz36y9Jjlb3ls/otza14EaHwXMXPks8Fmyb6XK12+bvbtd4SzZEFo8VzadFBrL7yUECIAIhF23kOvT4avv/7dq29ffvjBTQihrEpm7rtYVWW9XvMx/OY3L//2p//T3/7s59+8ennq2n370Xq9rurnZGPbtsemI7JIVkSKoqrBoWk9x7uHvdYlQwoco7GxIESKiIwUywKPpyMLlkW93tRFUXRNryEHzcmE2DN7Y6Gu1wK+bdum9VGAowCQMcYWFlFijJ33JRUwVNZgjAaJRSaQx6TnoOqOF4qy4LTBbr53OVpmm/sHnI3LazZ+og6cXumZRLy5Q+niGxER8FyHJp8JM6fG9JiFDEDGVaYzH8TZixQ6+1OvGfLPVndxhNlKRYa55SwrLdnQvBD57EWzJefiCGR8Lz0wYwKcFRl6D1tIP08zvLwRZ0gOlwqI+dtxzPt9DFVyJDyP80if0tkE0sJnkMz3PePkc3VU36CS7iBe4PmNM8RIhDZb3WPQu3gZMAwsaBiZUQRZBIwQARRkUCBGjIRgywCxif7tvXe2R/aWPEBxXZeFM3VhPn52dWjgdIgPXfAW0VDAeO8b2RmEI0aUqzK01mHcn463h+Pr27chgkEI7C1aYMEghHgiKyIRhlZcjyESALB2mclOtCVN5aDT+yGEsixfvHjxu29eknHBN2q6JbSjPjjpaEGkLXwEhIhQZXEA1iYTZAyicc45hz4AgEHsxrg4MmT7PjBzDFHvvB9/Zlf+2MWlXVym/nmRh+R/UlalHDICtEKIhokimACWAI2ARDYRvv/i6U8+++hHHz95srHrAjYurp1vmuLUdMH3oQtHz7s+3u7bl+/239zeN+A8FRy7wuL1zebFs5ubbbkqS7JWoA/CXdchmc1mE0ECs+cYNZ2VhQXAMFgnKEhiCA0ZTMbBocjniBsQERGBkLDvtRGFtjAMyoRhIcrL2LMul/tznm+MCWPQYEawc1UwJ+fhGUKAc6QHXoosm+0ajnpRjFoDkrUr/SAQF0fnnDarSNU1jTk74njMRssZeIqgSy9SEkgh07OjJ2RX3/dN02gfjhijMYbIgVZ4DoElMAcCvL7Z3txclWXRdV0MXQx905wkOhnDNdOBmPv0aOzTqGVORKIxRiR2XYND5KqIRJGK2erzXUea+2etNcbltrYkrqu2jKOfMIGlPbVmbGUhcs4U2Gw21tqiPHfR0Of7LkT2ihh+vFSBT44+mWQfnGGbBgGAyEOUo1aUSUc/9x0AxOiBAJDb7mAtvXj+/P/yf/0/V4X73W+//Ol//Nuvf/fbd+/eHXYPt6eHdX0lIsgoIgapLEs0BsjsjyfnaFU6a21hSwPIIYYQVtfX+/
2+bVsQOp2OyfV9fX09zjBaa41FaysAcDwp8677pHiScFWrESiZaBHRbr9vOi1ze2qaY+9bSwYRtuvNpl5dXW3W9coYIzH8s3/2z7que/r06U9+8uMf//jHf/RHP/zTP/1TQjkeDi9fvvzFL37185///Kuvvnr39q5t2xhj0zQAUBTWlQUi+tCF2FtTPH36lFxhTaHdLwFQnYQte1CNmYQEEBFhKJNDNOHliEIEZ4WQpmX9LsooMp6xM4qFzLUI02tGV4kvyCLkLGMoc4VnJuLMWMbs4fecCpDJTOnVsDhILh6Z0z8vF+7TReRiR1pUchrMjvAcIKN961w2QxY5hDFemNtsJnK+dJJz58/49rNKnO7ndoEcFBrSkK7EOB7LIcwXm0yPxhjAswqEiACkJ8TQhwAIkc+VZoBE5N27t7e3txpg6ZzzPkbufv6zX/3Hn37+V3/9ty9f33khQBOEQzSr1TMi07XiI4s4MoWIBK/NCn0I7GxZlKX3Hg1Uq9KHTntPWwOAwBJKZ6rartdPYgy+69++fn08HpumDV3PzGKeWlciWZau69sQO0So6xLRiFHIYx8DABOhLQqMiEiIAjChr6UAPeDkJZSb7UW6yZlEmO8LXmyw/siVBs+R5IxAi06kuYU4cfk0k4yKz5fg5ZA2JZclHSUCwanmlh6ccZUEhPTz/IIMPukBZpWq5s/n0BjmjKmR9rzvy+y9aY3vYUFmaiCDKUmmkdM+xjhhGrjggTPulxuGltPIF5jgMJsPnI27k1BzWThIL46fj/aeB2LWMzrBzWQ9S2BU/2ZbM7uDNNpTplPK53ARVt/xwlEtVTQeLhYENkggWhaYyBggAh9PTdifWgdSEpVUWIrGFc6Ym3r1fMPfFof77oGDoZWNwI2P2HoJe+paaau2Jozt/f7h7Z73J4gWEIGADYJoggeaE9C5mCfkbEFvEqStxGHJA6BGS8pFOJz5EshmtXrxgqy1gAhCxlofAqBujQFgQSREEgRAA2YEspAM4Q0iGGK0SIUpYu+7U0MrF71njt73KkEygzVj5B5NckpzVH9PjjpkVJCv9LHnZ8ufEUI+bHp+RlyIWGPFAK1IzwLI1joSFIbnT5/+6LNP/vizD57WtCnj1cY5G9rTw+GI+92ub1sfut3xcN/6r+/2X3x9u/ckdcUgIcZiVTx9evP0yXXhqCxLStUXha0ry7qKMbKM4RXMLEJRiIUAYSwrr3Us0oRTwZXUgJGIUkTYlKkawJgFSQ4N68LQbSKoR0KVwFyHCVknsAS3WRgIjoLlQO84D/qXTBfFrAG9lvWHaRU9fZe6CrXuqPe+KzpCq9HIq9VKF07kR057rgiiVxpWo/5yi3yM0bkiIYmyplT3RWNQ84KrANC2rX7oug4iqzHW+66uivV6/ezZM0N4Oh2Er7qu2+8fGIZ1mbFvBw5FKd3o0R3aMh2Px6ZpisJWVaU6MCLWda3v8t6n8qeppZAxpus6zIT5lDSYeg8mnNcf+s6n9obWGq2goxvHzG3bKnA41R1l7Pqmbduu65JTUeTctgTG40NfpMmVOAjbZ8Uvxjx38ZyOqLl8XdeJsIDvuubJ05tnz6///V/95Z//b/7RX/zFP/6v/6v/HUS+u7v99Rdf/OxnP/ur/+XztvdItFqtjHG97zTeU7uVGBh6TRky6kOOBM45LdR5Oran0xHGc1Pr4jjnXGHMUEumJ7dKCK8WBxRRNEhKNcpALzHG2DWn02m32zVNE0VJjwWiK21dV1XpCjuYKgwKGFMUxVdfffWb3/z6b//2P4rEq6ur73//+8+f3vyjf/SPfvKTn/zT/+6/+6cA0Mfj4fCzn/3sb/7mb379m5++fv369vYWCNU9qBcIoXVEhGC0zaNIFCatSHTmkAIADKIdNYegGxERCQAWAKw1RrSXSIww6JKIiIYo5wIyWo5773O3j4zWDhhlWR7Jm4isc0spB0aLS054MNp9Y1AWM1RNHREL7Bj7nl+oupmwCAtHEAaNOxEY8kF5QP3R2n6ugDRj9GnwOPYMnLG2nDGlpNFEdYN8j2xNcV6+ndhXzruiEDbGWqsEjGcR/Kwti6AIMKtly4pIjMI8mfl4qkG+rpEPI4CN3NFYmDgdgeMPJyb5cUWzwjOpgZik01Oyqj9EpB+ZmTkmWI03A7MGGwgiMAftkaKTMcZYO/QmsoQStTUFRmZjC2Ptoe2wfvKqKV8eV4fiT3au+5//8pe//vXvf/rzz3/729+zGOFrkRsUUC85OsQ+iGFrrUP0IYTYA7B1CKFHhsIIIhDDqjBEINIw7owbYFLX9Xq9FZG2bb99/aZpmuPxqJqwMQbICBKZHTMCAyJacgatmi3VdaEIYQEQLTAAAxivCD38fKAdgijqOYPRPEbaxgCC8ADhXKhNikR+dmKmASYJfvxr2Ij8uEWwuh3LK3f05a+e4sZUUxoRIFFxGiHNDQAAlSolhkE5TiUJS8B49vyDMYrwHKOua2LmREQEI+ATu8BpKuPIEM6TgdF6h4gsQwNAEGFhBJCpODjsgoFRAwAaean2vxsgi4g6Koi7pHirpRAzgTVBDxGZxRjSZUpmSfU+NY4fOJ6ObIYJobaUSxyJJc1nEpMZ/aRvavrsjBt0YAYUVMcsCpoBZxlBu0KOU80ayuktRaskUZ0ZBAAi+rEIW5qhIXLWqp07oVBivEQoomgJWe9BNlaIzjxZGId2jzDoxsYajEYhbK0NsTd0jukaQGrIe69HQQ4HWPR7/IPXqehQXw/koByOT5IAcJQANLq1fCCAEmhfUXfqH9gewXl3ZTZP1/WT5y9W26478fHV3XHX9A8RYjTGFKWtAeVI8GUff/euRS0TKlsm4RUzc2CV5wCNQWsBwGLeHjZX7QYi0tlSWp0Z8rcRRKObAIBZrKW+jwIMwjFEIrKGQbwx5el0+vWXvy3Lcr9rLJpVuXrYH4uiAgCtsDrmUkZRDzMCICCB0NBiR0BMYUKIbd/bsupDXJmNb3tjnDPOGRujVEXFEcBA8CoLqq+GBMakAtF/5x7OXMFYXjmbyu8nNpV/lXAgfzjnOelKv/K9EUK2jAYYo0hvAdaGn5XlHz+rP13Rk7q4unJo4rHr9/vj6fd3AeAU4W3jvznw7x6aV/t+hxWuDEiAcHxS07NN+WS9fvrkuUMoQPZNf+jibRtenvz2xTNfbXogBrEIRMTAMQoQBbKAhN4POE9kBuARA/c+qOTGzCBI2vA2RmtM1/XMbC1pKrsAUGENU9M0RMCsFSY1rNdI9KHrkaUwNggPYTWIMUZnrWSQGVmZuieToX8waqsAQwv3AMC5NXkSmQBAC97ODjsACDEAorEWELu+92Oxx3W3dgenQaTq/VPdpnaVQXSOABhBgCOHEJgMxlY8cK+aQxz78nXMzjmDBDEKRJUqSZFBPIgHEDAQQPbNaXfYg+yjeOiFIlhrQbjvuui7cnXTsdsdO+uIIfSy6+VbKvcIq1GZNMwQg0BkD9SIaMKYMUZjs51zaG0Poe8P1tqiKIhM2/Z9H1SzTTqzgsg5N2qJNjXC1RYao3hgcsQOo
T+dGmMo8IAq0MPhdEwpczgK+al+DBFx9IpUXdcpb6exNwaPp48QEqnEQnbI4YzMUVt6pboyzKHrPHMwFq21PkTvvaG2a9oY4ycfffxwd4ziP3y6LVl++pd/9euf/u1ms3r+4un3vvfpp59+8ic/+aN/9F/+4//xf4z73enf/eW//1f/+i/f3B3Kau0MHtumMMgSRNgYUxiyxqEQMwPb2tUFFd772tVd8CGE/X5XVEPhIiQQFokKW6wlCIsxhkM4NAcictYGP1jANZhWCxF579u27ZtTCEFr3qBAZE/GbLdbY6iqqqvrTekKAraFY8T21OwPJxY0tgyREelhf/irv/r3RPQ//X/+1fX19UcfffTJJ5/+4LPv/fCHP/zxT/70z//ivzi8/e0//+f//K//+q91F8qyLOvqcDiUpQUQ5l5pCgYJR0x4CsP5O7QaBgA0wLFzzpARpta4aB0UhbOW7HvOxVwQTASc31leSR/IMTWNljsh85LBOUdOBpv8h/kDM8lvNnhWXSoTXKYznK3xPWvJ55aMKwoBzambzQFwLoIsT6YluBJIaVHoZbZwyTwz6aVn4Wlq8jyzaZiIyxdPwXSdJekpZB7zHKaHE1tPeJL/HDOVO/2Qx/zjOHYIVTmPx3xrIooBI3c/++nP2+5ffP7Fbz//1W99wKaLzlaqg4kIChINNa+JY4y992gdWWeQTO/98dit1zWSKAICMnPwPjIzY9hut1dXV1qG63A4vHr16u3bt3nbpWQTGYE8NzMv4bmE4QxiyQ88oxQdfymy5NuXHp7RSBpfxly+BOREmMuZ5Puek1W+hFw8SvEt+TTewxYeW0s+pfReHoqIXBhQREvvXKYmnnbaPL9uyoLyf3MndvauOcHOYMuTyOcLa/yD0OAxGye/mVNlvtGzsAJ4L2otX5ru5FEYMMe3+QzfwxVnL5XRupS09PRAUs8ugkWEZ/fH6T26vzN80/s0RoLNpo2jDjvfpu+mB06g8Xd5vg8syE3LOyN3e3O9Krfr2lqLSEIObAH2hAIs6DkGJgkBDZEMxZZYMQRksqLF7l+83rNrM4RERF3WyBBQIzy1CaGgERH1AACS6gDKmQeS0v51wCKaf3KBEfGoyuYTxjEja/RLAy7mNp335TCc9y82//Yir5s9A5eo6eJbREQQmHJdcWDgZV2LtV7AiwQxINJFOfXhofM94EPnv304fv1u9/p4OkUQa/rOI0WLuN2snl1fb6rSCEOMnqhp+/v73Zs3t8djc/OJrVZ1zwz/P+r+tNeWJDkQxMzMPSLOcpe35l5ZK1lVZJNFdmMWcUYajNAtjloSZwYQ9A8k/Zr+CdKHkT5KHwQIkjCYRg80aqlbgDAim83mWkxW5frW++6955xY3M30wSL8WLjHOfmyqtjiOBIv48bxcDe3zc3czc0nB1hEiFAYBTjGWLlc8+tVkTyF6BOR8HG/CzgQUdM0VTVSOe16ZRpJi0ZFKioIj7PVSMF5PoIM4RnyzQQ6I4cNu0jMkOmrU5RN6vHu7i4l+UiX2ldVtfON9346Z3WcxDcxElHb903TJERVVRWFq6qqnEdEN6bVBQAYOIYQ2ra/3+3uD/v73WG327Vt64QRvfPooXLoBCIhsbjDHu7eBA57RygS2zb0nePYDLFPcs0R1O9yjg67vXPDeAsoOCIabyH3I2xp800PTN7cvKqqSg9SIqJ6I1rBe484ZpVUMk2RsYjTPoSeEQ0h6MKxTBHCycm0O/Zgdghh+jVtF+lukL2GgYBSJgS9xSFGEwmCMTHkiNth6Lou6oUc8f7q6qpZVU+ePtqs6r/6yz/frtY//PVf+8lv/+YwdM+fPfv8809/+tO/JML1er29WP/wB7/18NE7IYTtdtt2QvWKBRkhhF6PNCeOmgYybtU0TRNC8H2niYtUy4UQ0B/zCQFA27aTTSIxxrqu9QaIZKnGGIdh3C0cK3jvPG70TKwEJFpvGua4blar1cqTg3GPBJ1zutcqEquqGpMEE3lPIfSvXr16/fr1n/3Zn21Xm0ePHr333gePHj1qmvjp5188ePTYe397extibIC228u26xARwCGhAyAcpYy8xMgxxin16LgNQ3qRp2OklXNSN+q/y4JDaA2FtOcwahpzEKg0mxKj202DtCuYxF5/sgarNYt1GSMzcbBYiT+lKbA4bYVzyzWbn6AwgDKEWFVlmsXMnMJpWR3MDMdLYaVWo+H8IKX+XsIwV6P5VJcgSdUWP09QGVFcuJqs1MLpc/smYdJuqOqDTi3JYbY4xymkTeYWsH2QefG+3reHP/mTP/3kZ1++en2/3w3b7aPt5qrrIgIJAgrqWiQiiIBAV9WV954ldH3LHIng4nKFqBcEjScuEMk5BIBm06zX6xDCl19++cUXX9zd3QFAMoBUEVsMEyLkYS8J+CNLGPZYNlysCWXlqzyGatQZ2mI/t8SFSb7EnKBI7digkewhETERKGMtixDLA3YUlmfGPycELAva/I6ZqRFG9JZ5bHenZN8yasKJTFkoMw2Acy/C4HNBBEr0QiFlqVhFl0r6M1kw2Yjm6gWSzrSRBeWQoZi5S9lPD+W4ss/nIvkNfMJFwKTIOZHpEDALSWb4y6HOPO0kp170EyIadzsN/DBieAbq2P43dAjLAcJZn5kFI8BhGNx9cMyeCAjvusF7/+xufz/AABAIIqEARRh3aM90itM646h/5mf7M4b52oGIeR4JoweKRIZhOBwOfQx15QFGUzWZeqwxEAKIggIMMu6VkjslDtl0o70554g84mDhEUEAGTO6z0E+xc9nxpgxm2X7DNTyIZORpefEh5NgIjBjG+Ku7e6bunIY7/eM3aHv7tr4Ruju0D17c/fZqzdf7vZ3AYLzgA4cO6BVQ4+vrz545/GDq02NDBK7QPe7w/PXNy9ubroB6tVmu73Afhg4QuQpewdwhAjIkcHNchCI3mYEx5uy05BH/Hu9etsxB11yretagKeQwjg5h2MgiV7RLiLoHZq1sJEVTxwdL9FYYjg1RdNtb3ZuKpWhVe9ivMFUX69noCmKUh1CgvHS+XSXulbWfTN1DBQh473coJkkHRF5IoejJzBeQ3+3u727u9sf+r4P4030W0Jw6BE9MIoEYAdYHXbDG+x3d4GZCYWZuw6YG/QOERCRI/d9Pwwa6SB9DCQwcHTBeSTCcae045A0m6rT1Wq1XjeaTVSvJteJTH0zdWuJqKqalAnGRsbSdFOFTi5ucghBJCISosZjVN6PloPO+4hR7Sczj6RsqCMzzMOUILIAVN7HGJnHWzfS594TM+oG5BD6vu8Qsa79atP4iuIQauff++63b1+9RJAnT548uLp88PDKI9ze3dzd3d3cvH727Nnt7c0//+f/HMGxeF9t6vpiiMwMF+vN/QGYSU/PjlBpjNR0H5+iRaNiEn6UCe1UonKBiOow9+3QVePB0ZGfw5jUR2OY13VFRCH2AuCc0zCKuvHD0DdVXdc1AY77qCIisl43zmHbhshBRBwCIjbNVu+W2Gwu1nWjPtF+f9+2bT/cAUDTNJEByXvyAgRIVWODFqcr
KJnjsBMRh+iU/8ERkfPY9y0Qk5O6cZeXmwcPL7bbdVVVPjMQk3hnc7kWZnbeZ/UzrYrzqHFbLbH1og2xaLuk1s6USQ/mF3Dh3ELNerG6snxpv8oG6JwjM4vPQUWOxxg2q8LK+dL+aiqUrunMDi5DZezaPJoCc90q83vGEHUOBlgiYglqQm+qnKgJZobA+RXYqabFv/08IaH8lab0viHybncIsdpurrfrSsAB1YSDhu8xcIwsEFmiE4fEkdvQCQA4j7XTQ9g8DINAJILK13XtR6VA8OzFZ5988snLly8Ph4OuvTFz27YXFxcJe8mgoTEUdoaipEqUUouYFFOy9xmZ0DjpZVM4L2Im+7QKo0WDRmRuDeAUs1dSOR2xQHOh7SKoFuasWEJbamaNlFJmsWRby77NJDrrOqs5MtUcdSUk9l9EBDzngFlhz361o84GsljT1tel1gy2NLNm4pwaKWlRDhDmPAYFUSyHLAKcFdua/SSFNNuXJQUXwZhhb/YBpRfTLIBp+hj1zAl9lVEAS7l9uyLFDuEpyo5weieAkXkXBtm1DG8Ofby62TPz69u752/29wEGpAieiRDA4RFvPIFKuqyJ03LJnPdswWLpoQCS7ellKeqkiszQ9/3u/kCXY2aOqvKA1PeBfI3IiICot8giTGmiF/GKiACjthRBM+MT0RgZxBpPeLwgochWjcesBCUPn7IK7IRSKpYzzLnYkRVS8z6ZjLqAgQHwxes3X17d1OQDR9qFPh4Y474Nzwd8cXv47OXNy13boad1jeh6jnW9cjLUjq/W60dXl1cNVcLC4UXHL+7uX9/tuwDgPJIH8lTJmmrNmx9CiFE4wjDEjkPSNmYiRpFjBFaMEQS9H0+pVa723ovErhuDrlliCBqSM6QbJiaWYN0h5CkUGYx9leYDmBt4S8wwVkhxHPqn+iTqw/CUaGCROmmYiexiCkyCoPGNKeWm9x6FnHP7/V4v7ktXsQ3TxYD6Rl0CIpIxNB8QsXLO03hb4DAM+317f3/fdh2Rr6pqs9kgYkOXiDhmRuPI7Al9Xa36oSWsw8AhTrGX7IiqCIzohCWEcd8MEUMIKQtOjFFIiAQicIS27xQwmGytvu/3+9GFTmP33q/W9Wq1qnyjLqKmCRUZjwuZrKSOx4sK4jAMTe2SF627W4nKWiGdlhxzQUU+BvU40vrplARN4f6TgEvfHfTs5WREqecfttutJlxhCTEGZm6aqq7r7bYK/QAADx5c/eAH3293u59/8tf3t28+eu9phcDMTVVvHz/94J13fu37P2DmIcgnn3z613/zadchuPXd/eG+7RTaGGFcxUIHoMfhXIVHxwkR9cBk0zQalKGYsXMTRxEBImSGoQsd9yLSdV3KkuhxjHfQC+LX11dN0whEDWaOPERmX5FzpKdXZDoBzhxiHDQ1kQA39aqp/Gq1Wq/XDx486LpO79usqLKKf7W5UB7o+55cRUSBkTXPtiQ7AWMEpdv1lQshMItzA6EIBSX2o8fb9aa5vNpcXG0vL9fbi5We3vLW0M80o90uKIUzU5enZib7lf03PSTaJFVeahNbM2szMZ81FHAyfO3eI8y1jJxwUbJhJss4FV1uKPGgbFaqxAzybI7B3MY9gjTVnAHJPIsyheJ2R8xcaByT5p/X1+khC0ktrZDsz3THYGok7SBlveiblJymRIKdWtJ47+53db1ypDnocBhi2w7kgcglEiFFACYQgRj5sFqt9HDmod0DQF3XVeWalXPOr9fr1boZhuHly5fPnj27u7vbHW50qri8vNRNf+fcxcVFNCE6MHmDRCRwPElvUYSImWG0iO1U7ObPouwsikOqn7BUdqQfplMEdhUgo5TVjLq4uLh/a9h7FsRou7Nzs33WKhY2ABCxyzH6pkQdlwlXy5J6tMsQMOfqRb00hcAdmW3UBicsxVQt64VN9tFMY2Sf65/Z/iEWEQ0l/JbWMNF0rjTyNTX7vNiyJb19ztTyKYTb1tLnPF1HuQg2zNW+VaGWbU45bdOOdzodOiIhctRPZ+2P1fINtF/AIfymhQErV6ETjrCLIdwf9kNs7nYhhK4P+37oAcF5ISeAjDKd6RdORJxW6hJbotk2LUX+vJ4REZiu5RBJXtxIshgEJAKIgMaWa4ggV1XlqAogpFcYzwg9Am76lWwtDEZ6af/H2VmmLLtHH3U2EWQuq+jwYC7Fi+Jsx5vJQsaBpbbEGQxfW6YzzwJIeiAZI9Kbtv/sxWtm3l1fIsYhHCLJ3f7u0+d8e7d7te9aBq4r9FXPEoQ9jCe6vKNNU9cOuN/HobvZ4Ys3+zf7NgBF9Id2OHQ9eV/5GiquIod+iFEiA8AQhDUrC8yzuGWRSgiUNnNour1QJ+4Qwm6/06PyXXeYrp0AxOlMVAxBjhdOHNd8DQJlnsPTzlxnqKbguZS73xxaTp9kf6oGOMXwqUcRUU8GEQkcER0OB902nC619yl1ikZKp/WIwAOixhpRrTtpoFfkDfv9/nA4EPqLq83l5WVVVYzQ7Vs33WfIDBKnm3H6ynmUICSqFWOMgwAI1uPlK8LoqHbrkQnpqME0DkdEhMNms0lKcgpW4uQ8Jx3b9Ye2cyq26cJ6zYTpnNtsLtSdS8t2mgVDRAh92kxOm7SKljSrpiUbIoLI6eLBpPCFSO26o5Aw6w7q/nA7OYQzE+LN3e39/W3XHYjIV44IgNbV0Lv7Hgk+ev+DH/76r73/7tPh8IP/9v/zL198+UXz419Hlgpgvbmom0qjTAHAbRr/vYoj/ps/++v727ZabR8+eBAB+f4WACBOU8ykrtAU55xDaJoGANq+U/OvjyHtc4pI7Ef/MA5hv98rivRk5jAMCRsagisi21WjzrnmT4lMQ+xx3CvSzUa1ASRG0DWL6wdX6/X6+vKqaUbardfrvu9FRIImatGY4cp7v+/uqqpyrmrWXt3+IQyVr2OMTELo1DnxfrTj0d0iM6H4qlmtqs3m4uLiotmsN5vV9fXlw0fX63UNyG2/3+12bbv3pURZAUaTxCLxq5Xnxc+zaf6M6OqfdnMjNYKYzzGnILQ9TvrweP91BsPifHCmQnIsYT4tWVU1BwnLX6Xwxyxy7MCzmQ9N4FOqYEsSWjEzvUUvIsJ8/RWPHuOCG1xqWzSOIhz1cn7hWBrjeX2dvrKLXkqv1BqOS1lRRAYE5yqOApGcDyC1bs13g65iCjkmECRBFIAIGAS6EPvIvXOy3qwuLy81P/WrV6+++PLT29tbveJTB/X06VNdbtE5UhfGNPgEC0OfmZHEXOV5tOaJSGQBmYgzKqeflCJ273EkGTKYldRF8bHtpzp2q9Z+mHrXNa2qOiZytPRCRNsIFFKThmZ3pE+VuaKYUT9r2cq+9mPQFRHzWxCxcPAs753B0hlQsy5gzqtWssquFwduxRkM9jIgj9E1c8N0EVep5SOrFKC+zdCy5+Ny71KbZbF1Snm3kwUWChyNsUh01BslvwGA3R6EIyanrh0AggCEKItXF2qrRx2oXwHACU44xSEqwNmbc+wEHAERgVzNgG2MfRvdoNeCU3Q1Iwk5TkI2n1M
QUQpWL8khp2fGkkURWeVIRGyEtjrMHCMiII7J7tW1W602rvLt7uD16gLkMfKag4wqTdR1RUpIZREEjNNJhFFeOOUnFGHmKTP+AEKIAqN5VCgTzCfQEgMnSVCITHp5SrLKBkv0Ks4F1SFMeokEJQAMgF+8fnO3373ePWwqDDIEHl68fvXVPUWB4KrBY0Snt7ygr7qhrzyK6KEmLxCGLvSH4faAt4duP0ggP4jsh6Eb4qZZxRh9ReQceCFi0sSGUkEMME8JRkQ4hQ6ODgAQurQsGA6HThN1DsPQD13btm3bxng8EkYEGjiqFjAYyYXJoqD59KTPzGzvAASjWPjE8RmdVdX9SKOw7H1K5Za61FJKpgR7zsy/OrMfDgdE1FVjNyVEma5CAU3WJSIgse05hIACzMdNMOfRIQELh8ggQ7j11Xq13tZ1Beg4RBGPiF3HRDQM0ldeRIYQsRMiIr+V6dK/BHbKVjDhkOq61mpV1SjYUxqt0XM7RoohhxD0Zr8QQjv0OhBE1LsciWi1umuatS580xRSqxuJ9+1eY2UtAhUD3nvnPPoqYdg5BzAmD1M3L0YWiRJC7SiCxDieR01XIwYe9AqNlFtY84geDodhGERis6qbRpHPMQ64xYpcXdcPH14T4HvvPq2df/7VFw4BObJ61wASQgXUNM2+79d1c315tdlsDoe7GEIUJ2o0IgBAjIMIMgLgmCROKa5bpozjSkclrHvjfQyWpYe2CyFIZL15UvE5DENVVTJd/ein8Nqu62LfhRAePnzoa1dVlfM1BGV+ANHcXYQSZUrJ+fHHHz+8e1hXFdF4LnEYut1u13VdRVVVVU2z0hW0SZFC3wcA8HWVSCbAVe0BQAgQZyfRiOLFur68vHz48PH19fV2c7ndbuv1artdi8ih3b14+cXd/Zu+13Sy7EvDLtOPST5x6e6EVMcKsF1pyFWJWeHOdPT05ricmbSJFNbSYoNawe7plcOxjo0d7+Kkm+qkNQM7xmzanpo6jtduziziOcOb7cIo0HxzwMKWbLJTdIHJRy2/SjaSra86OXWXcJsp6EQgu5PGJsw4MwTThzQd4LZbkdl2GRguqrYrZh6GiONFOiLAw9BVtaZaHhwyOQaMIANzqCppuzs9ZPzuu+9dX18fDodXr1794R/+leYa1mmgqvSUoLx48WK1Wq1WK5Xnruvqur64uEjZX8HMW8xcOQfFWaaJB3LOtC/RWMO2QvZyJNjceYbThedHyCye7VSK086tPdsJc7bPSIDTckPGV2V3luJWFmzjNsQF4CjaVnzSXl0al8h4EbPpNGH7yM8y37csOdCWTKElEoy/wlFAbC+lpEywOnUCJu+F6JiJAEb7W0afQgDQZKSwwpWJs2UPi+oMvRbnpR4rn0uclPrwLcsiJBb/Fs+2wsRXxwVpaywiEAgB5Pnp9fQU4jG7FU7LMTDXVwaenCG/0QBTsZDD14skIwMjCoAgRb11TaY1ON1TAmEQQNFUM0cOpNEbRETBo++aqG77/VrNAIVs6itNQWqwMTreMcjQ6xEyrptmtVrtdocji06rECKCo4wce5kUnUxd6A68iDBECIFjGDdsj66p+vbg1ILRlSg4rgGkddCvcdWWabB08rYU4QxRZc1My4kII2haX0TSnJlRMAgNKDeH9u4AbQREiRLEwZu7/gA1eofOC1IUiCyI6AiBAYj6Lt7d79/cHwYn7a4bDt2zW77vYicYhAJQFIwMfd8TiMMKp00eQgIhRpJAanwnQjvnpnNSbtT2gpzSRYao96N1Xdf3vV4H573XrUONJm3byMwptz7MjkUsrJGJCINEYWap0GWT2ljB7C6ObGdurkrTffZJZstZApVvsucRTnMZqZrU+q3a9G66qn61WmmGT/TsvXfkhZmjjLcLBIHpVIWItPv7/eF+3NWkg69QoCH0AhGJUe+0AA8AkcF5BEAZc/r7MIx4VgiPiehhPPavW21UedC8qCI4zdqIx2TjmgOPJTBDiu6pqmrwA477exwCqlfWdV1VqeNXq7ep6Vidc044ZWRN7SgkGYbHLWKQ0A8hhLbvku83DAMRhRC6MKgRla4qidKHEIYh6htlQhGp65oIvPchBERhDiJVCGFb174mT7hdrytHlb/46MP3nz378s2bN1eXF5tmRYigudljHHphka7t49BvV+v+Eg6DRAYhX9dI4kPoQ9DNofGAZU01zfL/hzF6Ngbd92uHXkeqscfDfowO1T1J50hhhjhGBvF0MYluKnT7HQC0bbvaNFdXV+tNY2cN78k7H3pFWh85eKqaakVE/dDGGJum1nzOjW+YOUbu+179D71XcLvd6rHe0HfgfV2hdz7GAYkBmBCd181hV1XOOffuu99dr9dXVw8uLi4c6TZm13b9Z5//tQoZObi83L733nfee++9q+vLmUNobaNy+hwHNje8bM30Z7L8UrWsDiKmdKBJtkeGo1xrL5pK6df0E5nbNlX20k/GEj0CaaGyY8xeJi2WjldlXaevRATx6Dyk5X9rxmUYK0dhZ6D0e1bHDpOmkkZkFTHoxDxBmNSrfgKQUweWvILyvf0kUTC1nwXFWdyWY09bYWkIFgPMfH9/W9crV63Wq7qqiCN7T3UNRBJkENAAnAASInch9CL+gw/ff/fdd4no5ubmr376Z5999tmLF68ePHgQYj+EAZAFHDPrzQHr9VpE2rZVNaGqUG//TMi0u4VEarDOaAfFyVjLHgVBc6fXvrTsZKmj3FVi1XKXbSS5OoncE56hbME2laTJ8pIldGJCC20iXAa5hQfmXGQ51moee+QJVftPn+C0YZ5BJWYx4mtXuBI8JcwwrRBnXZxZ0HHk0sI2GHlMCyU8DxZIF8GmXjKBylUcjFGsCa0yvTyid/pVRNxbO4SZSpkBaQITzpTEITKFM9n9Rp42hRJdLP9kUfdny3gzjS5vW6pZSOwAp8st81XF49rYWxfEfIfwfKkcjqtqrPCMG/KsYdKgd1YxouZwQo6jIhIQ4SO0aJKFICIBxNO+0OL7RF+azvIBgF4Di0gpa0hiN00ocmj7ZtWtVhvdQlHrJ0xnXEUEgUUMO8238gAAkEWEIxOBCAOCEZApgx8jQ+LZ0dtdHFqm3KyyOkWCxFRWphYRlbGHFcnFlmfvkWEKdBeEgUHQVQj3zPv9fohhfbEayGFTM3PkGFEAHXpi5jiEhlwUbLv41eu7L17dXlZ+2HdDF5+/ubvrhp4hAkYAQDfEEO7DqqmayhGgqCZHgooiYIqJs+KclO24LROFYZxrukOrp6S894DSdYyIdV1X1UY/bNs9TzkkwcSwwFG4ZqwFAFEYpk1gT5V15Cw5MuWcZiXrIibIZ/e8Ge1hLUHLCfZfWyHGAREjjwGNkyFHSKQHvXSk+0Ol6UarxjdNU1crRySMIupJizA3TVM733WHu/u7rusqT03TvL6/2Ww2b17f6XYcT/fp1XXNzLoPhpMJWlVVCIcpqc94zYarK+echq1OAw8hCACEMIg4771vPFXEzDje5AQDR0B2vvLTPc8AIIh1XSta1JZmAWaUabEbcY9jnt
KNWsjvPHqouEqxuylOKh0otQsBKKzZpw5dqza8BljpsbouDGlvcPwKx2OuPAVFKJWZ2XtPDmOMMQ4auMTMu93w4PLq6dOnH330URz6/e2bDz98/0//5F8PfddU16t1PXT9MAQS8L5yzrVdd3Gx2W63urHmq40QHfp+s90SBCIC6iSOvOeoWtWNjkUH2IVBd1+7oR8XSsLotequ5rAfTyQqinSXtXZBRDRNUbL2OUYNP767uxuGoWp8COERPvC107viCIGIADnG2HVjatNh6EWwqhoRIYxN08QYeQg0bjzg8SpAXTocusbRulnHOITQe+987WJkXxE49BWtVvVms7q8vNxsV03TrJpeGXIYurv2TdcNIoLOXV5eXFxuHj169OjRg6vra6iUbQavnr0enTpuQYooT7A5IZM0QvJAjvmIibruePg1KW5mTnKi9dNPx0OZE/9pp+VKkvITmdOAVuNP3JbfdmCtNKt9rHoCs1mvBkcChpmdc03TJLNGt4bVYYhxyFSw9sgx6Dyepk+toPy0CHBCGsxX4NLZPOuCqiUhs5kexVw+m3g90YuQYLqPy+4GhBCIfIZkAHCOElnF+PZ2Q88adml5D+b3TKbPjxOoUdMKQGISTXemv2Y+AwCIRCJg7rt+R1gjVUOIIYSmqQC73f5N5fHJ06uHD59Wni4vL9/c3P35n//ZV199dXNzo+p4taq77kAETVOpiZZAZXP+IXUqUxbchPY08K7rHFVJBNKMJSIp8MmOOtktE/mO93PAPGBSNYvzo4jJ3KahKSQVpwkgE8xs9rUbKQkAmDxwW9/CbF1frZCxlohkH84pNeZHmXECABHZi4btqKEwwtRHtkA6p92N9r2Nf07PSXZSajV1URzOXIhkZMi0Ops+H+dFOua7sybLIp4TQqx6xMlBsvrn+Dkdb1sRcyGqVU3TWZHj1lkauBgv2hJFjLteEt1KqLW6mBlxlhIg6ZPVaq1wHrMFzCPTsphnMOrOLu5YTZhGioiaSMDqExlDCrMA+BEVqooTcUWAjymsZ16uiKgTMl0RaPot3I4MUfbN+H5O9GwsCyX2hChAgsCCIhGcn8w4RGBE0gPPIAMwiszkFHVvEEbFYYFx0+RrIUlCnTjZMow2NHGR6jc30VGpIHoIjQi8rw6Htm37EJiIvv3t767X29evbiKI9P10xRkwAwE6cjGGru8225VAZAZ1WkWOASAxDsyxbhqFSsMuJOLF9mq/awGgqhpNveB9fbyXGNSlzBGeob3kfy3lwg0UUpAkJanZnOhLJB5rOtJQNBIYuQlJBCIK1fUgctcHcRVU/hBAqAIOgrozzAIsERGAPMbA0Vf1xebNfvizv/n8W++8g4w3r3f3h+G+D+B8EIxIQ4z397vNanU4BJQhht5776gi78ChY9dsNnr8YbwUzkxMMOnDGFhwuiogRJoOaOi9BRoHqLEzGvKnIw2xjzweRB/10pRBJCEkyvGIAZlUgtlkJMbkI7NQrvO+uhwZIez8OMP/UhboVKw9o//6eRJEmXZKVTWpia/d6QUDsB+Vm6Oq8t5RVU3zSN/L+uKSOcTQC/e3N4f1ZgWyev7VXXfAzWajKcq3260e4M+MIkTsXRTqun7cYKiqyldUiydy63XT933XHWKM5B15x8zoSAKn25vnUTac1HWidYyRnAOAaroAHBG9ZxEh9OMW8ejt32vMZNdtYLp1XffEdrudOjZt2+52O/WXNGx1GAbg2E9OlEz5V0cKUtqZdmn+VcYb/VVjPCBi13VDwKqqnEOtUFWVd5vb+91PP/mb//K/+q9+9IPvv//eOwCw3+/37W5zcdEedhKZOdb1KvZD13XVqt7t901TKYRXj95pW+kjiIBzlV9XjaxijMKsO2x9P6hQKMyHvru/v7+/vxeEtm37vmcAzSirlgwIVVVV115dQe99RXpP43A0KtSXUVoLd10nIkCrw+Fwd+fW21VV+dW6Bo5t2+53d72GnjpwjroWEV3fB0RXVX7oox7YTpqKOUpEInKevK/6/tA0DQAz4rpZMQxE8eKiIceXl5vHTx5eP7yom4o5DEMXghKw7/ueyDeb9cNHD9995/2HDx8+fuedcS1vPA6sZh76zIBY1ImZuWOE8/gmpWkCo3nB6OtMUK35YrWGHmaY2wRIZsU9UzRpHct+sqgsLBjWcrIWTFafzZUYidFFBGC2HXRsc34nhK2TKaYMzwY/eaxLhiLnZvsn6dfkz+R4plkjSblnXvdsFMbySHRcWCmcNI59j2Yt4BT+LfJhmm+sE5JAJaJhaIkoxoDYV86Tc4LMESIPu0Os6+p73/3oe9//+MH1xbNnn3/yySf/4l/8C5V2ZZiUVazreovP4/OJJWe2YWx2M4ePRwrTeMV4BTC5c3Z0tgWLZyoCO2XyfDKoxGxvnOJwS4J8mMd/0zU7s8bSzryMi3mJDxeyEGc9JmYrhSiBNFcIy5DDkbHzvRxE1LMDmCV9TG3x5Dkr9yIRIE4+5FzD2JHmQVBWQVlAeX4GptQ26U1qE0054gcAx6uiMBErExaLTzYL5EkxWoJaJKNxwKys4dyHTItczKz6JNMeOF02AAXFF19mGLNrOllNMa4pFDwzx7mGMIyjyBb4RHSjgmlayLd9TSfeFoqlF5xl7HJ0ZyqkostvMu2wChCByLjLjQCAEgkYgeGYPgYAYArEPPZ4ChL7UAr7WxYaM7uwRAQ6Zlrf7XZXVw+8r588udxutw8f393c3Ox2u5ubGxFJCQOVJzUnMwBMGwsuRkknBp2rnHPC463fjjxir9CbWR51Z/I43jmDpaWTUwjJppJFziyrpcpZsyU/JOHVhwg8BsYCIAgDqGojxIiAglFPmI+5WIGgRwEEIRAGSlclOecEIYALwm/6WN23GOPNbtgNfRAgJCDPzEMf+75vqirK0BMjMA+BPHsBdJXzXsIw7XrhdOHTaJonkww9CU5L8DIulrFERNRd9ykKg4830eG4gi+MiCiMQDhdaKwIAUEkoIky03IqH/nTilipDxcRbmeBksowIn7x/YJXLyLxRLS/c45FQFcyiMg5daXabq/aZpDYIaIucAAQYFU5lKFvO+HBO6grQRk8brerTeiGu/4WdOmzDXpPAGiMNBAiRlWMkRkO3nuYrv1QgarrWhBTOKVu58K4Vj5LqjxhZpyM5kvPIhBDOG4GKMr1w2HolGq6M6ZzkPf+sy8/S3uDOHnp6lypT5jcdbWp1nUTpzyuxwkFwXtfeZ+uwWDmwJGZw6CnQxeiuBGRxtvSI6IoPIL04sWr29vbVd18/unPKk9//Mf/6tB1P/3krz/66IMnjx/jag1tG4eBIdZ18+r2ze7Qo1tdXV397LMXz756UW8uHz9+vB86EWVS8N7FGOMQYmQeRl9XB7jvWr1P8tDpITqIIs65zWYz5nfxq9F11+1TgiRryooyLZvqKVwOo/bT06pV7cgjgHT9AYVjHIa+Z2ZyQOT0EgwR0TtFAcChCAqA6P5TVWm+nYS0uKqp8iICUrnNtr64eHh1vXnw8LJpHDkBF0Po9/s3XXcIoRcRq
h5sLq/eu7p69OjR9dXD7Xa73W6hWUHoQRCANfIEcCTKMa4yE2PLf/Zf+96aGlaSF5XsqXkr2RAybi/Mkk2lRfosO2X2YdYjGg87G1eqYDV+trCUzPRk29l1WZ6HQ2TDWVzBtTXt/GTrTBWO62cJPClOGVkkZA1m+MmGbEFaNCay6RPnJjIYlZ0Ua9l+ueB3Ch47Olsh/dk06+kS1T2LeE/kHEu4erBerertZlVV/tOff/JHf/ji009/9uz57WZda7NVVWlsZ4xHLZm6zp4tDy8aChZgEEgmUTa07MOMWBmq7eyY0CXCiz5qZmQnpFmYy35L2pE5NSfFESNLvgmkXKIXR3EKt4tgzHCLggggx0amcUm2wwNzgyMrlt+yxYVs1kxdpJHOnFXI5HH2oR2jpbX9RP/VM2/J8YOjyMyATPUzfXssZ0W7xDZNx3FTs1bQ0uZnGnvac8vIpGthmdtmG7eg2lVwwzlHAbEMNmnmUW+kb/VXs8Mwwz9NwSMWWiKKkdnsS+AUswAnChpFaum1uLME1mE73WbWPiKiAKMgAIIAMoFahOj0F9AZBPQPyIZ6tizqpfMVTsE5/p9IxlMJGGPsA9/tdqvb28vLy6qunzx5cnV11XXd7e2tnrHZ7/f7/b7vOxHx3kceQgiRh9hFREo2aIxRr8MOIRwO3X7fMvP+sI9D1fdDjFHTJChnzeYLnIWhpp8yni/nl1JMsvelqpR5dmIxs6HMZ8ZjI+oOAeB47fWkrVV+kdIh5PF1ag3AqfISYmAkinqZI7q7nsPtTkJsD33gGEEceQAUkX3X3t3dEUDtxAkTR3aOojBg1ThyFUxbgulAV9+H0UCvKj0nRuQYprMDHnH07AjH1JEgoj5Dr0nwIw8qpyEEIKXIwjQBkhMFETP9Wf4LhbZZ1GYytzdM1zPiLn5rf0p8MlfCxyOLqqhTVMt0hzvoMhMg6lKjr11d++128/DB5dC1EvvDvgmhf/7qvq7rruuYeb1ei8T7u1tErJpaFZE6EgnINux9TIFmEYRcNd0KONl+RKR3JAAAsCTwpkZm+mocOPLow8+v0QMAPbur+13OA4uEoec4TothOB7g1JJQlBbWLaV2YzI/iBNHJWeJiFzlATEKD2EKN8VpvtAw/vFYlciozCVGZgl6JaCrq77jQ9sPw7DebK8fPv7qi8+evXjx7Msv/uhf/9HhsNtuVk+ePHn69OmTJ08264tV0zxZ++rucDhIH/s+DNuNr5r6fn8g7wPrNiaDbvrt267r+kOr2WJ1dIe+0w32yHG9XqtnXlXVxcWF5pVoqE4+iBupk9hpRJx1CIExxiHGqDtvevKQObbd3iEwB+FIApoUCjhW9UpEYhwQHUoEEABBks2qBmDhXk8da85YzYRzdXV1dXWx3W5W2xVSFAjeQ9fv+9C1/aEfDohycbl6+PDdq6vLJ09/qHRRxtbgWAyD5shFRBjvlB25ZXZnQGIFnLany+nfTaGMyc6ApQNUts3EuFYRJFbODBTEvMckt2hit2w7b1/OqI8EjB2XzPcQypK+Tc+ZQ5j9u9ipHW/SvdmOXGok2ymFObZt5WSc4Rz5dmjWPdDPk0JICMmCN7LhlJsbSdVmiukUEk79pN8OQ3Su8pWvqnp7eXF9fU1Ebd9WVXV/e/fZp399e3vbHnbM0FT48KphqSeVLzpxmybNECSZsEfnR8xGq8WhhYqIOILlf4OQmTU/9QUJ7Yv0Sm9GToNjkowCP7konbF15oAtAGy7TvyWEVfMwcUSZij4s2whA6NsZyxTNo35KMSeJzyBk6KlYphZtcwymBPr6BCWYywbARNAlQFfMn8GDE+nZ0sgLfYy5za1bBcI7EPaUYGCLmwCPnEKQB2GPgGctZnBT1Oos2XjtJJt0Z56x2JhLgFjQ8vsAK1+g4KlSyp474Nw6mUR51nXJZLPf1J+LnNvYV4DRUDvkwBAAEZBAT2xzRknCuIpX/CMqjwPtmXaM+VIaxEAEhjP/LRt9/r1Tdf16/VaL0eu6woRHjy4Hqay3+93ux0ze++RomaMCAOHEPs+pMMgzOpTDHd3d19+iXXt+77nWN3f38coiByDRI7Mx6RiqNauuQtUrS8p3DOeR1OfH6z9MFW2XFpi+5TsI2qaFgEWBBTWFf0xLwhABATN04OIJMoPQoBHmwuYkABAM4IgYc8S2k5EIjkHAcHztB6y39+/evVKQn+5XQNHgrqqKhQEckJ1RX5VVWhCnNq27fugu3wJM+iI3HRdBEeazvuIyQHbtu0wdF3XhRDiZOKKCOJxRcni4XiAci7FMR5vGlykV0YsG08B1mhZWliEwiGEE+o9NXWK7rb3FESKiHXjQfchnO6ver3RfH93ixCdx2999N7ldnWxWV9cbtZ1/W/+6tMY483NzatXr/RgWNtKN/QAgx7gBIBoEg16alCCxCisqXsjBBUTjjEy0BSJ5hS/bty/dJNlcjzkr0NwTq+QGK21dB9I0s8ymtDjcaEUOIpRMBxvbsw2/VKDmthznDVoWrtFrJwbt87qyjm97I6HoRu3PTlapwARNVXYKLOMzGKyA5BzzvvauSqEMAR58eLV//n/+n/brKqh3w/d7t2nT7/36792eXUxdP3ffPbpT3/2N1VVXV1dPXr06N133726fvz+R0+/++Lu8+d3A/swxLvDDh2FEDRTi254dofRIdSDnSmvjObXWW3W6/V6tVrpCks6HVZBDQAOjqtUukxaVZVEjpxOukaRWkQIHOtLGCeyw+HQtjCEjkAQBUEqh0heJApzjAMAOwTnBGlsHElC6BGYnHjv1+vV5eXl5eXlarW6vlprmDdLGIbu9v7+9u41cxAIl5fbR4+fXD+8urhYX1xcbC/WtFoBb2WUZUBE8lVVe4AqDnsAEInj0fJJOry1EjJTNZvaS9mzJZ17yT7MBDX1kurYmunwWzpBJ/P9CjbrHzKt7C4adrZTMeZ4FhyVoM10R2qWTFZM05rTdQWYzxmElL0U41mlnywaSxPnlFmT2kkVUvs4d2CsTuTJIcmaWpxHUZc9ing5KHYwxvHOIx7Tr1KEgJZjzOpbNNoBtvcHdOw1IZbw/f2b/X7/5u7m7u4uDAMirpvVgwePkKcsajzyzAT1WEZDs8gHgyZU0kKY7UVMWlWTyuR3P2Z4AEPWjCdTU6l9MhlWR9yaFuafzygLhXxZ7NH8JAkYPs+mzCRisCTg9v2i+GesaD2EES2FNpiRO3HaFCZq8SlyPAk2IgRyGddSksDiuVRHi4MSES7uMDiFcO00TpvPSWr0T12QWxL2oxDJCS8dLDXNHl0aRSYmtjXrm6Vm7bpvNhxbbO8yLf9PduGRNDkFi9WiEsmLCLRApmp2p84yarkQpsU5xzE/c7vIqFqyhbYMpAVtBgtXU54pcQqoAwD9SERgnHcnvQe6j4TJ9dFe36aPhPlSkC38i+rdlqRzaNzRcnoO4v7+XkOqdIupqsb87HpbV9p3IiKPrmka8GpEUgxyOLSvX7+5ubk5HA6r1SqEEOMAIG3bPnvWOodEFAZNFj3eFRw52kCAiQ8SrzLR
TI7SoOwOyZlRnyJu6iN7cxb3AKBhVhNLy/iXYDJ+NSI0IiIyAI5nREWEgBnHGGgCAY4CiASCFJEHXXX1BAFBU/MjAUAcht3dXe2QeJChRtmKiKswYqigY4D1Zo1TkgIlzW534HY8ma9WCqHTi+mdczEMirEp+Ue6J6AbTws7iAzGPVhAERfLGIgI09ZJQrslR9J1X0uCQmFC+eeCnM7PcqeHcm1aH8qzzdq1XkrhqHJOr0gRIXIE1w+u+vZw2N3e32+H7na/q717UtPlj374cVVVjx8/rqrq5cvn9/f3r2/ffPbZZ69fv+66w749pJwrGhcdOomRQ4yCKnQwDJGZV6v1MPAwdMHpHQzH0614zAI4y6+hQyYHyV0UEXecH4+p0SdCjCYBTxk9dBNPpiuprG5kcyIm4Q0RnR/PsZN3vqpoSoSjHmAKF0882bV6r2PS9rpZyogIonEoUFVVU6/V5n/5+u5waKu6ef7ihuOhclBX+FvvvQuEEeHxB+84pBjjmzdvXty8+uLFs//2j/9w1WyfvvOt+z3c73Zv7gfn1+iqF69eDzyEEDj0wzDEIWj8s0Nh5qqqNptNXddCWNf1er2uVw1qQhPveTr0WFXVhlYJD0jjVhkREKj6oglRk1qOKnrDEPthGELoh9iLcOTBExKBI/TkNcMqOlg3PsYYeYgxiEQBvYWSH19vm6a6uNhcXV9cXl5uNpuqckSkOYdf3Dy/vb0VkaryDx4+3l5tHz1+sN1ur68v19stEIIG6fZCTY3E3jEgCgeOsW9bgdZ7D6qhEv8jg+4QQlHQWIqJI5OAnZHnUlytEGbWAE2n1LSL6YFofuxY1x5sJolM+LOuF1XPkaGnxAxoDBG7wJypFevYZLKR/YmIUzKD2ajfEjn2eT7DzWqOjn5hgWUrcMc2C4fQjq7sWk93nAKvREL2BuZRamU7Vn2n51NzgIh4twoD3N3udrsdOmEOh3a33/OjR6u6ciIYQ3xzc4cMTdOsVluabtEVHnczOApHnl91dRxjlvUxw08JIRaow+O9l7O0NNN4Z7suqSky5wz1eXKfZjAkPMi0WZdawCVHNEN4ErE0kGjShyRoLSmzIWNxj+Ip+bJUE5PdIY03dacvRxhmR+PGExfz1k6OLjUFZsHCbh1knGnhse1kP8nX2TG2OFfZDxPA6hBOnph2pOM9CqzFQ6KphZmIYh5AsbznBoVgJgYu69vRafqBBEn6NUxnIbIIjsS3Vk/SlKbCkkaO6zKzclSWUwvJFpF5CHqqbP+coE0bjOOKj2VyLBzsRSyVzWZ17BssnK7FwtOUiiR4xIQgoN6gwKC3FbjRepZovh19Qjrtgs51y0IMdmKkM0DOh0yTNeMAx4wjbdsmnZMiplDz7E0n0ypf13XtVxPIQn0/DEOXGInHk4QEwF3HiFDXMQzjaXwQChJ0BFZdZKfzM2ynIWf8bLVTenkKgfah3JEuu5uhV5N2s6CwZoVVcKaVvCgACLrR6UCEpQJUtc4gGmszLjSgsCAACjPGcZomJxIkSgSklBk/kkDbthIHYIkx+jpixT5yHePKZPKYzg169O7+/p6Zx61ChhpE3QbfNDjbXx0DChT/3vvITn1FdRVEcodKEEoO5IlpSz2j2Mvst5Iii8x8ihDnhdGSuNTeSedAATBMEVIgo3szOmAouF4BsvPUrKoKI8rQd7svd29evD4wh3fffffq6qrtu+vr6+99++P333mq+Ta7/qDLK4e2DbEHgM9+dq8uYgzMzEPkw6Fr+4BIwzAMA8CYcmxSnip5rHCOy6xmdf44Ch11hRbzx7kVjmp2ZtAGa686B3PFa2V/1LMw6mplmIFjuvXkiF813eeRZfZK4fQax+Q6dVVVHOFwOAjg3W5fe+cwbrf19YPL737vw//8f/E//+iDd589//Lu/v7u7u7y8vLi+urq4YMIIkO437Vfvnj++ec3z57vDh304ebQhoF1oZYhcog9CnhXu9pdXWx01tMsyuDIe980DTrSVEBpyN779XrthiO6EBFJ5QhAZZ9h+iQxEIYQui7GXvq+H0IXJQBI5CDISOAQUKrxlheHLL1AIMe+orrx63Wz2TZ1TR9/60PnyFea4FSY+27oQwi7nquqWl2uLh9dX11dXV1dXV5cba8uwVcACCGEQb3Z8Vzo0O4E9X7RCqn2nkGiRhcLkD6MBAIEPUNoBSPpfTTGhJU0u0OVrSiAUc3WaDg1RVkJp+OJmqORoe24KaF5ZmSUutu+z1RGAibZ0Fk1NAUKo7Bo6uT663lzwVbLwCh/WhxXgjRrJFNtM9XJOR5OQYWoK5j5CeAMS3ACyWUFO5DEDFJ4g5ZhwFifAOBw3fUdS/QefINEsN2uLy9Rk6oBOO/rVdNUVMWAoQ+Hwd4fqKl7FfLZOrRBQr7tdr6IiA7FyoJtIRvyZOPNMJ+RfhEAi2GLzFJk7E8WgamFbHcX5jF+2VRtCwAoq5/h6oyaZypb+h7RZfolIkC2PqHOeWmRAhFhia8SwGlcbrqvqUSslS+L5GnMQvPTfVCQtRxgItOZahOKjktsczznUqPV7JJcqh9jdNPkncmRK2hdjsViwzmnW+v6J5kNSZxWK5IJoifdS5paeqU3Mr+uQ3JTPneA4cQah0wGpeHtkPhXAw7Tfrgl7mLJZDxRLX2VSRnMRexrC5MHFtALJgCBZLw6HARFNRIJjvlFEJAgLjRSHp89Df/i+69tLB0JkWnNUWlejUkjhkkDj44ET+cL7qcLtRFIRFYXVQiBGRAcgK7BHZPAJYqrjkl3psE0xaSNmlL16WM5/HKkic9LuYZC9rPWTi2vnMLzeC+iCEyhHAQowBGYAHhMK+cQEYUJUMALCyCjICBN2fxAOGj+K4kiqJmwIB5Two5Rprqe2Kwqxwwc2/YQOJLvseqr1VA1g4tBr5XTDUDNh+HqCgDSXXB93/N0nKGuvA1OSeZvyoos81TM5bZ1JikwLWRYpQEFZ5bv069ydtEK5tPKKT4vexn1ockWbiufcgi99zClBZJpfZ+Bb25eXV6svCdygiBIfHPz+qd/9RfPngUhvL7+4urqiog2m9X19bWr/BjbWRERxSgcHEpNRN//7hPdgIoCMXDXdbtD27b9Tz/5GY03vxMCxBBZbyY8TtkpwfJxptP1k1n0mblHFwDsTuOk5SLiMYP9YG6wzKQmPdgdGiQENwZIx3jcXcRphQ8m80NLU60AkqxYEwWJgNBpNrthCG2rNzFIU6985Th2bTcc2vY3fvO3/if/6X8KFcnhToR/+tOfvnr14vXr17e73TAMMvTbi0dV1XehYxAi5yrXiMMYNPbWAQKAQ9L8guumSvmWAEBocnrdOImkCW9MchNHg3/CsHI86jzujpctjWHWEsdzg4fD4XDYh9gLMiJE7jwhOWSQyjkiWq3qVdOE4Wa1bjab1eXV9upqe3G52mzruiJEGULb9+2h63VnmAiR8P2PPry6enB5fbW9uAZoJvsWQzcgVYQbv6o8+qQ5KwCAXphBGFCEI0PUXGcIIMCo+c2mfRGfyRKZa/dgHmukSyaJ6lb
GzuxUZGrCvswasYZLxuU0Xpkq6YRM4tc43WcIJ3RNerbCn/i7ZP0MpATV3NRYMFn0cb4TNStY2MqZkrK90DwIKkGYvkhSp39mhtqxcTTPZlDMDObMW/oEcXYhbKJdtlecPmQTcpyp1znGxq6To5K9T42jsXGZmcghusq5ZuWpin1/GEJHhH0/NE3tfcVBhnYYICJUwFBVDczplU0Vi/jPVOEidY4v00TIsxU77TYbuH5nGc/iweIwW3zJMJmBbUFd/DWa2wIsOewnCedElJKLYO7VHLP8zd/nz1mFIz+ccBGnX4+wpUCABCQWRl56zn5N4mCHbJYnT5r1lmlP/Wp50v4qItN1c/kkanfMbP3IMTmrYpzzLOTefmLlIsXkZIPKpNgmWUnyWw7ZCppMV2VAIezKISmsyNpSFi1WD6RmVW/DnP1wyuWQ/iSaLcTasdgHRBSJSeeQuUz1FAVLgma4XSTTsam3WCSaFxIUPSemKRSEEOLRdBYEYAJEGdOofj2cJWynylyTn4Ncb4SHkS7HIfd9z8yI6cpsDiH0/eC9IyKHGBEBwHvPUXT3g1kAUA+pqUMI000YRMQcEURvEwlB6mpcWWC9HMwdb0AZIU//wLKTYPn5jG63cr2oS0udn8lviVIAEALShSlxiOwABQX1xjdM2l5Ac7YAMZCAoIAgIEREB8KAgZkJEAg1A63TPCJREMU5L0BKQ4lMKHVdN0RD34YwdF0HQ4SBK4aapYpBs1+orFVV5X1FlUfEw+GgyTM4jreuOeeExwvGYowis/RLWifdQGjRaNUgGXtg1Nt29eoEf2brXyXVrH5Y/FUfNOkgLK0lJbWTZpPy81SsA2wbYWYEPOb/FUASEEHEpmnIwdB24mKzaTgOL18+v32zcc7dvf6K6LlvfIyRCC6vr/b7fdPUm82mXtc4Tfcap+ecq3xTVZUghSG2/dD3QUGqPQgSMzMyITpXBR7SoCAX8Nk0MW3gxTnqGDSD0OT5iykAUDV1hkAYV3COkXR6bEF1btd2iROOGC4cbJp2zXC8etTOhqRG5lQn5TXt2rZFqNebJgyt966q3RBj13Wf/uxnH338fh9Cc3n1gx/9Gvgfx7Z99erVy5evXz376u52f3P7WkQePHhwf2DqZXu5GYaBxqOAWJHzNO6Yhb4dU/iozpmcCBDWDExRhIg0cKZt24vqUoc7jZQBgGCyY6eJmzmEEDjE3e4QQui6Q9u2bdsOoWOIiMDS197VTeUrX9f+8vLyyZNHlxcXjx993DTNetOsVhU5DrHt+v39fXdo7wUGIlivVw8ePnj8+PGDB1e0WoO7BPAAIcbeORQGEUeu9s1WhIQRxKM4QCcMIkJ+SlKIoGfaxlRFIADW4pwcQqv4cFqgTdeG6MWLiRGJCDnqGobKHACQc6grjmO6MABAonGOH4YBkUDDJiSJK4ikKEcEwBDUtva6EGK1RjJKkhbIVoITl+OUCycdEYb5PTCgyvpoAglOmXamV1b3gWg2gKM5ot0dlVGmLgFcUkZq3apcqfWTPonThTxqLSXvS2VWMSOSklUkmQeR0SSybvBkS0k6EzwhTUSEo+hiRhJRxY9zbrrrGFFjmXhMikUmSCDh1lpsMF8pMMM/4iqRBubbDimLFxjdbfXdJGA8ultV6ymGEA4duOAQPYHnwE21JSBgQhTxugM+oB+TvqmjYfET45D0Zrq6g5lBjnOP8jNk2xQilOYeEWbQ7f7kxeFkIaWxT6zOIqBnYBIFmUGXXIZhnJidI2aYekaEaBNpJuTzPBIyva+qalEcEpns9ICIemFuMu516UckMlsHcmoNmbCykNhnXTcYY2qOsB1Fz6zpEEeAKcUYokdEYYgMriJCAJossCmLgZj1CBFGhPECjBR5gkdQNPAXHRIJIvIUCHxUaJykdcyv0LYtETlygDjetE1YVSQyzNY4iFKF5P8n/iciwKN6lKOXi8wMmPTGUf84PDp+1kiyNY/MqZYiQJhy/XvvnR5yGIYxVziMp9Zw2Z+fXZWZXtoumIVo3AUaBr0X7nhDo9bUuH1cOssn031imcbQAWZLeKmOZmbnY5L6ZOKPmLEqBcDuaTvnSHk4BK7qioXHq7f1P0nikFuBdq3OYikrll5ZmUkZzKg28gwf1GJGQUHUIEF0JMzMAhD1wmERARmICHVvU7+FMSI99WXxllTiKbDBLJQcX7GDERGqYsZfh3BARCIkb+4pEQAY8+9HljgaP7XzIABDgICoi8hDLyICVOn99kQUp5VJvfMDyYtqQiAQDEGFEUJUjDnyyjmDnT1k3ErQfzwABDYh7qpVFC2ECnEiASISkgKdYcnSOpunLJ+XmLfEHXErUQCExnsWwgStYMoLOaI7AohEhztFg8I/Gh8RASsBRFVnAhBBgpAI1ethGJxDESYE51w3tG3f11dXrPvP2gOH0O0odjte97E/9Ieqatbr7WazWTWeiCpf45q8qw6HQ9sdYowxDGEgD3UIUaaQyBD7MAyRhxCHyJ3zggMjcpTh0O18rTe4EAtDkqcjLsftETeCxCzi8BihkOgCRWhuwn+mNNKHyYSza/QiMgzHrMj236Qf0ptpEqcMGKtuywXAOl4ADex6cTGCCDsMNUZa4WoF261fuxg36wa5gbjuuuu9RGJ0etc8kmtqINr1suvDbdvHV69lNO2m3t1qvE4gReFFEZH9/hBjZNE9ZnRePCJRAI5N04QQ+n68h2BkzgktUVcWEDny0PXe+2mSTVo0X/ibsKFaSOwRRDjGTCp+RIRjCMf5iEfvDsftT+KR8iNR1WA6Cg4GEWE9aTtaoUBE5AkRGSIKjvc1BGahlUePIEQx9FcPH0U+/Ot//W/+8qc//eh7324uHMQBgCAGV1dPP3j/6QcfcP+dF89vbnZ9hJe7HQrhO08ehQgAOvWw8+p/BgDw5IiulA5EEUWQhPy43Kl3ziOoByGEbl03gVvvPE4zNSK4KegMRAD0DkLuuu6w27dtu+tv7u/vEbHruq4/aGZRJIihXV1vnQuPHz/8zb/3vXffe1xVfr1u1h5FZAjdy1evbnf3fRjI176uHjz88Orho6fvvHN9fV3VK9HFRECE65FVSaJoCgxmpR2yUBLRgE4AIMgj0HtvRKbbShgAIg+IoKuWAgIQdd70SUJSQpfMnkj6cdQBzrFxwFI1ZpbpLpr0MqlR61Vqyd4kSU5TWmYQ2DVva0g5c9l6aiQ5P6UCSk5p9qsr7u9KRklqf6Y5loJjwcwfqTWLq9R41pdt0EJblgwtdnQWh4koyZFOjrFFeNn4KepnK0AWmKwpNNY8zLW2hTObAGwj6UM0Oy1ZL9l+2pmSIT91utg7mlLiQf+AOe0yhs+4SzL7bCrZRhYdNU6e4zvrxfIkmB1yy2ZWjjKOErPjlN5rarhFOO23JU1t18nxyHCSOl0kcaKvXfGBuaRY8jGfM4iz9mFuKEyDXUhPMjXOyd6xcgQn5AWMy50qZHsX2YdJqyQCJQitlCVQdaFBK9CU2gpNVlgxQTsW5owts8GW0pcRLiOurZNxOM6SABUWViH+pek2rz
lDfuIHG9is7xdjWLLFkb87BeeqNZXzoFqiZAT6VUF1vkJG6EWVkt7DXN7PdGo/SX9+LbRZnYx5LOvCnOW+tuVT3ZUq6xQM2cOiwizhybpARM3mgiaURi+TAABd/kst6J+6DVjX9XrNzND3/arpNA8hEWkm/VXfaGbFw+EgISYhDiH0QzcMw8CDphiZIgB4SiXlCDFt+ULOhEfCpX9DDCVyfoGS5kerEsuIj68ts9U9o77SdGMro9E+gEio+YEdAnGIbXd48ODj6wvXdvv1xRaaVdd1U6J+QMTJ0yMi2mxXMcZh6KY70Mfsu/u2tYaliOiCQl3XAKQrmSLILIgACK6u0LvtZv14taqqSu99UWg1Myl6pxcS6G3yM/Gc1lBAz4oRqgOBczfbCqBM9xBmHJuUqkeXhDfVSXQZp1E8Ijaz03jKZ6M7N5mQIqKuhjjvBF3btk/fefCjH/16XdfAPGbsGi+G1BKp3rzz4cW3vvX5z//mZVVVDTsAWK1W082Ho0MYI4kIAXpfEempaQEAgcgwxq6PQTqSWACJSJ2aJBEAEmVcIY1B08boXS99d2j7ob25fxlCUB8+cqzqisg7T1ePr0Psmqb68MMPv/Wtb202q344ILpnz18OY3rfoWmad9/58INvffTk6dPt9TVMISSDREFw4Bw4Tu7osdC42CcEswMIiqU45QMWzX6s9Qk94pRhFBjAEZKgeEtsq9Z10XdmpwKoYz/w8ZPZNLAUSKkO27g/O0mdlYdMoVjFakuYxzonpZ+4P2s/41HT0cwLTd0lONO40tkJmOyYjPtts6nHxbM96U2mgDKBtKg+X/QTa/3YpjLYsmgBnOIBLAwwV45ZI9kcZiHJHO8MHphoWraTFSqijnGerMKyDRTpqi0aZzx5wrtIn9hOU4VMG84+ma+0lQTNoGXO27eVLVNNMQyzfufUyW3xRNC0bZU5Bqla9q+VDi3Z8kcGZzkENNahXdzJOPDU2I8YiLl7kAlCBmrZPhqry7KfhTB9JXPHFeaMqv9PkNid7fNSmYmwTGdXFqG1X1kIk5K0R7gtbu3okkRnApKxetrHLpM6WHxmnG9xYvUqTHKaADtFi4SHjCFT+5l+NhVyDsmIm+l55uN5FdtpuSD4b7Oc4pZTUlD+tFjzPBOe6jGT6EyIzlc+Ba3VS28Duf01ccLXilXWqeWu8+PKpAzmuugte8wUqWTKd46uDA9WlaVfM5iz4SfI7Y6NiIQQuq7r+z5JPcz1m1Yg8jEKIoZ1dNNV2lr0AoX9ft913TQul2xHmU4DaQYaFSP1XpxuZ8MCpYgIgDKUIOK4P2xQt8jeFgm2ZNiw7sQ0Q+V5wr+2WOVWUiHVGSFELt8jga+o71uR6Lyv2IUQ2u72wYMHb17cIwICCaGrq6ppqkrvnhViRuccD368gL6LMW6bJiFcRFIORUU4ACA6HWgy25xz2+32nXfeaZrm+fPnyZtSanrv9dI8dfiViGUEWTTXI6FxCMsVtFORCKPoTZEgo6adpkuako2p/5ER3aI9i1ixdZxzms7Ho3PeM4cf/OAHf/AHf/Dv/rv/DnAAFEAGpHG3THe6up1rLq+vr+/v7+r64eXlAw7O+bptW0TdtjcyCHrsYkBE3ahEDSsQadvWOed95ahyzjVNXVWV93XXjjIYwhBC0Gz2IYT73W3oh67Tm1r60e2Psec2TZEOqa5rIqhqLyhE9PTp0w++9RG66rMvv7i5eVXVHoM8ffr0B7/+4w8++nB1/WgcXWAQXR2gCAKABETgJh8vkSZRbXQFEUbDHinppemQ9vSJbkETokCcbv4gVMsW5BgyanfVlNhTONlRBTvnaLx8ZzaLi3EIM/2bSeNM9pbWqu3KdybDSSvhvIjxARZnu0z+LQtm/9KU7zFbyE8F5zcgWajATHJY7MhlBY3xmgDObLtT32aNgJla7EiTyZiQULZvMW9xW2LMTlGLyEyQpIdUIaNpSVkwGiR7aZWaZaEEgMW/RakFUpYsj4wNsvqnHiw/WGjJRHXCpN0QMcaw2IWd6hK3MDO55aXoDMl2sNaAWBQxS9BsR+s8pyGmJKx546WgTS797PSpFHZVhnOZA5nVtIw6vRE4buLZBiS9mSrrt4pnnNSoIMo49ZumUod2OKWMlOydOTYJ7FItaJ20+JoNzfKtZTatn2k26+BZ0uM08WdoXNSKi3CmmT6a+wxtI1lkh1WAWWV94CnOFs0iHRg5Sn9OYORWvuV2q+5kMi+sK5iB8W+/nFEm5wXNohQLDXym/ayX843byqVIyvyidlvf6hP7yaJOPl+y1s5/WPab6L44IoIcnr9tfsi6s7KAiNkquVWei+3YZWVlbPX3uq5L0QGpC2bWdDL63HUd0RgmGkLQ0wFEJMCabKZpGh7G8D9E9N4jrgEAAgG0emAnmbO6JSAAALMzFEnkxQRbJaiymTqbqrLBwtdRx+Jq6vcbyEU2Vdk/s3WuVEREJAgzw5imkmN0TL52H33wXlW7r7764r333r26fPgn/+Yvnz9/Tm5rkZBmjRB4zAXijp55jDHCmAsjhKCLxTp9a0w+WAveaGPdhlI/UOFMeE796h5XNLeApoFTunZrQnhS7zZCh4yzd4oibG5yilPsqDU1LcMvqr5sYkrw6xvvKyKIMQJx23b7/a6qKr9qILRjgm6JcJy4o6trAHr48GHf9217+/jJg6paRWHvvdZHFABGbIjIk3OumpKoMSJquLOgbDYbAGRmjhBjbNv2cDgAwGH/Rv9s2zb2Q4xxCJ1uCXIYlGrTwCMA1FVVVVVTNwhc1/XFxRZR6qa6ut6IxKsHD7o+vHz9qq5XP/nJ737wwXtP3v044QIijLeYkotRiAiJPFQakx6BI0c/W+ec/DiAtGs6Zj5NDuBo6oCSAkEtIqfJGwRIzxPqewLyiR6JldMJt3LCKAk5m9dxxgT6rNEOWSOZcrGN2xsIbV9JRcJ89c5ODNYmyCYM08WC0YzmgJBVHInY2cDBaDrbmrZjt+OzInPL7NREe+pzfV9uflrEWhlzzulx0PRTNhWV+E/DyQZYKvSSlGWDpUawv9p/Fc60KpEomwGmZRHD4+jOYk+LnQxspKIWnqdGzB4sPGmMi6Jh0ZjNjklZw0RQCwlizpw4ugFHucj4PJNEZk5JhnBuDtr6NBVESseGregttmDfZ9SReUAmFPNWKrZCghmNSVHK9QRYLj4Zo1rYFncX7W55OSg7nLcpWWQBzFdkrVRa3FrwJuIeb25MTJiWXXEy1tMbu/OWoOUxZflsdQ8Kx29x7DJN7epilWd4rGhYUO1726wdaaYuSsKJUaRwguXKbe2EeYv/t6fd31IptQEUI7JSXBbLrnCCXt8UpEUxOUWvjCjnASgbOVMy0r/NJ5ZvS2hzSP6WqZ9pvLcsGdrL1lLRvR0yoTE83YqWjP7j5yIAsFqtELHvgyZi6Pv+9evXdV1vNpvNZuO91wuv9WBwH8bMMQCsXgpgA0RIwswh9F3X4WT8xBgFUFW4NcCmDa7ZonbazgKj98DsCH0txkrOtF9NfS0kE1rkEPuVGIsx/
Vl+gjia10bBROSATLe3d/z+1cWmQV4DctvtY4xN0xwiCEAUHkLo+l5UTaHUdQ2QNmMFSQAREB05pa/3VdL5SlmNupDZ/uEI7eFw+Oqrr+q6bts2LQ2kBGNt26rS7rouC5SApVOyljrpDU2JJK0GACOkI/5TAqrkiRg0RsiQOVu+KecCiwF9GfuByDFzUxNK9fnnn//Jn/zxj3783QcPL2G8uIWnq5sEAAEJJFxdXa3X608/fblaP3z65DpEpJUHgBiHEPrx/hQRiTwMMcYYQs/MLIHHfAqxqipmUdHo+tD3/TDEEAJILyGOHqAEANDPJY6QO4/O64q8AwAiqeu6brzzm+12u9msQuibVX04HL7//e/+5Hf+3re//eHl1Xa73bhVAxCgB0AEInAelP1EhMH5KXeDIhPQASLJ0dM7RoemGXPc7puTYLxgFsf3jODgeMORyOgZMoIHQJ9lt1MExRh1eckylv4bQVIUXGKdESDIi50FMxlenL8z+8POQ+oo2u60ZDseqX42YZiHI+QJQqsyrL3IUyqtDE6rBy2Lw3zlMlNPpWKy720vduBnUJrBkETXUscu3mTqNYMtPViJTV/ZLJR2yMMwnNatYGsm/Ng/F4FZHHVWLS1kgtFZCf6kBC1dLG7TMMvsrKkRWDJMk5It34sxTMUsSWScY61qnDRvmuwRI9FMUnAy0zO0pVGkh0TrDMPZn4m7DH0X1miICBHQJAmQ+TyKxeGx7NfykxKkkrKluJ0aSzbYctRl47AkBXPxOddpiYpMHtPcVu6wWYATDOk5hcTDPFLATqUWRUuQH1VW4tKkBCwkmfuEcy1dyr7FFcyFWg0RWKKjvskMxDSnlMSSudWYXF8w0mTBS45rjLPAp1Oc82+nZGRKD18L1SKvps9lbp99I3hKVW9lc7HBxJypml2KXRTqktUX4TmlKM7Av/jeLqbMOzg3rrcvmVwc5f1EKPIigRbVGpp9pKwCFtE0yZFLUNF0kFg0A2wIelDQuagOYRiiRocCgH7rnQMAPWPGIeiZJW3Qe4+aik+chrf1fS9yHL7M4xES9WOMKWR0NpZJLcD8fG+KNbNMBYY/My1U2nVTXzNsg+HVRQ6081emRUsKAugtfQJTxF2yvy+2665rh6FDkmHovvvdHz5+/K0vvrx5+eUrrRAC6sUJAAA4Js9TLKKJ//e1bvMS4pGHp/U70twzYDwo9foOh8Pt7a3aKtZXVCWphz9lSv2VDaqMCSqXRK3utRZj+jVVJgZE1B0lK4M4BovOZmcAYJNUDAz/y2RjZCTThDQyrXR0Xff8+fNV00w1GCSOSZtJACC0EbGq6/rDDz988WIXQuj7joUAAiIOQ7ff7+/v7w+HQ9d1oR+SHQsALEG3amOMyZdGREKP0wkOh6wcpNB676vKeWoiD3p9hfekQjTOdyQqjwyy2aw0jVZVuQ/e/fh3/8Hf/53f+a3VxRpkAHIAMAxtVV9ZBADHoNHaotkgAcEdt6nARBzIFD6Kyc6czfKG1R1OOfkmb5AEWMCJRAAnElFT0CB6m+jSSo4Uy67agd6gYvvDqYA545TqW3nW+nRCpSbmy4ak35aGUYIQ58V+m8k/ImrKTTBzW2lGJFRwkTEiG0v5MmstW9UuMDb7yb45o7mykgTYbi/A3HWxAp8pAjuExTcWmVkj6ZMSS6WCtmO3LSRh02e1pNPSUcaZiy1YvEHhuixS0AKZgZ2RJoM/bZ5jkfBGCh1aer8yLZiljqy8EJHAMfofja1v7WPbmu16kdkyitj6xsg7YjL7MJPisk2YM3/mIoLhq4wE45+TMinFwX5uKuT3SaaxW6Kckkco5MKOcVHuzkviqb0pSwuLQywMWUtre9JDTOCWbSG9zODXh5QuPIPf9pJJE5h1Crs+WMK/KBeZ1l2U+kSpUiFb+MXkN8pE2DbLpugleJlfegqMf5vll4fBKrpfps3yq0y9lD/BnKlgTqPyTdnj+UFleu888Gd+LcEoOe0Xo8LXsnqm5WCuajKSabF+XanuLNgJS0n2E8OLiFqizDxGfzKLSFVVVUUiQhhguoRwt9sNw7DerJqmEZEQAk6A6YaSrkNFiJOhPN5KqrZv8i6soWXst4UrQ4+J5Oca2E49i0oge1OGQU3Vlidcy1S2pOsl4EQpKMisy6MI6h0iIKEAwMXlKsQ2hPaHP/yt/9X/+n8JcPnZ5y/+7P/0f5moKRwDxHE3CtvWYkzPdIkIdseFv1QBEauq0T/Vu0hIiFOx82nGMzIdGky+opWyM0aIpYVVoRaTmbCjO0qudR0ZAec5Diz1U+XM28xGAQBN0yBHImrbFthVNdzc3Oz3+9XGg0TNwgwAAFHzCfv1w7Bvv/jiSz1k+/rm5c3NPaCvfNM0TVW7vu+7/rDf7+/u7g67vV6PAwAIszz5aQMcEZ1HR2M2V+HW+8o5B8DOucp55xFgvC9+WjQ/lqb2urASQvCeNC3c9cMHv/3bv/2jH/1wdXkFEJkB4gAAVbVKDvMEhvdjHn4EOBo8kUMIkZnrutjAELMlqChFc3YJ0hnCMSM6ggcQAQKJepdsOpcrAp6m8OLEDbpTkXY8YApAUlqGGGXKMn+EY+ShHFQ0Zk3WVMncWWupgn6omit9m5RjYqn0FU7eUQ6NYcFULb0pFVCmiU7poPQ+tZOUuK1j+zIYGxcFkxinOllWvVQy8U4aJ4WKJZRqszYEN+2+ntmm4xP5GBZ3ksVc/JpQmiEk6wLnE2GCc71epxCIREEyaesthnFav7G7cGOFpTUSyxv2k4SQkl5279E+8DyWj+fnBmnKQlSiF+YqMqmADFd6Q5ctWqeua5nvOrI5KqAv7cJzuZZhJcUin3k87WDZL/t2sdi5JDXo3LGRxFFWojNs6HaotXssr2ZEAYD5AuixgpjEcTIvYBggvUQzWWZCnQn+ItipZGf2Uu/lt4sYsOApLdDMmmWu9vQhpSxEAGDUjr17Jus9A3IRLal+pg/TG6t1Yc4npbzjPBkYmQuE2EQI23GlUFULLczvWbUNKg6sFPxdK2cAO8VjMEds+hfnodRl/YUuplWXEgh75j+VlCVCIHP8MDHrKD6q8WTBCj8PZ6lbcMmUt+NanEqW3yAo5Mel9BM6TM79ODNSM8izHjNZsJ+ICOHs9F02/KyylcdklTGzZo/Uauqq1XUdYwRmFNH9jbpeqQao63q1Wt3f3zPz3d2dcy7yhRp13vvYB21B02GItIgIDmKM5GBaYeF0D43FA5mCiHo+IydEQcZyaexUsb/SFAFk0UsmyV+qqW+sfpAlMyP7c9G+QkSOzMCCKKPzQajXTkGoPCHCerP6+c//5p/8k3/SdfX/4//5/0qAISIAqpMOAFWl8/VkWnMCLFpIps+9ZoEBq2YZiQhwDNnTmyqGYVDGsGfLtfAU55/sE/sTzk1Qmab+xI129rHbuWIM4DRvinE7YYoUlawYUlqzPwGQEUWf27YF4HVTxxARYb1eI2LbthIv0YHe7AQ4Xj7DzDAcPvnk5//0n/7z/+a/+ZevX3eu2gw9braXq9X6wYMHDx8+uLjY
V8Rl31aRp6QtpYELa0V+h/lVSKnKR5T3qmsUUllSxyaJmEZ0ojLYVzMQStiszyqlbIPHhbVP3zUzX6rZz5rLRFbL5ma9aEz2m3ks9tK58FIRqt9yfjtqLqrRNL3+IqSP7rzxyFv3nagvwnFPRT0mKS+hv1uCWPVQmANUllGwsbLERQrflMGZKyKZMxC3P24DjLMaqnVLNEsyKeqzU1M1NKZsHYcvghVVNLalluxBB4TLrvuy9evfzzh6ePH07IcWtJzSZPHDBQNq/boLR2fp/lzybdohqpbbXhOTVUL7cWeBWRfDJ5sYzl1LMFZMW5q3KgUnjIKeE+Q4XPFp6qHl9/mcjgyKyFtoYtj3k5q5z2AJBSIiANKaU4HJ96YLw6cOi7V7u+7yR0BoIx5cWeBIEYkfG0tTCNLyRvQ+Sj0fwypZQ0ZlIREUuaF4EARCQOx5QmWp21JWVjgLuOolrWhfq+F0Z6stNjOh5Px9NoRswhJiRLLAE4X8XKfdLs8NQE2RQx37NVQ+4IM2X0Otwws2k7uOchLrid1ACbvKa7wVIABo0xkimAB7b7+/v379//5S9/7fv+j/8Y7u7uXr96cXNzs9/3RFnN035/MEsGPSWFwkDMTCYSWAKLsCZLSDpZJIB1BBPnODEk+eTFzEKM0RbxwZj5LH3Kaqo121tlGK/65DQMQ/Uyt1Xcymdc+A3sktlvtJQWPQvZcp1WkXsRtRUHVkRfcq4yT9VcBWEFWFWnb66Kk1ZqawVToa0WDDPLJ6JwLL3F81jK3wpFtNQDSiXegG21dx4bBZKyb+TfV5B4AEqG8n717lAZx2LpPmdYEehzj9b7W6UCQOUko/RudaIys67rph0pp+IXE7WttqrK/cvCESIiIGSTHSQBERnPHqbMDKZm00HiHAq4nAhlh8Bk8M5as2qQgPwXNIeZSaZjjDHvtBEMpDDMPoHzVWkQAYgMM5gZIzGF6YgxKTOrmlqcbiLm8LKggAUe/IivkmLLO1sjVRWsUhWmotDt+jhOa9564ZTy9fHtVURFxhfSap6KBto+tqSoS/8xrfiyOtV3dytxUUHYSq3VPrace6G/VZ1b+Vez1fnnX1vtPue9F1kXwL6QqgH6aWmLJPzXNs8qllowtt60vV4d8dVp9ALYPzNV/PXTWmlJ9ALhFWG7xXqVtvDTet3268JeIZzuMWVYKhgts7ddMx0IxkxCOb5g6AJpMle/ERlICfn2Ec+uC3OO6dZiSqOqMOe7W9lsVPPeYn7qRF7evbi9evfnDydhYlNVU84u4nJkNJAhbS+rPB68oFtVS/xIkVszX5AnHvPVsLY078uuygosaWNVWpYuVACsQlh9bR/aEa8gN+d0w5z6t4UTP194fcnL+S2AVydom/UrM0NSzHGRCRxzTIHQ5RO6mOIp4XFIEbTrOkhQUrCYmSYjYZaOKWDamWYzaDJF7ObQWTzAzLKza0zhlMgsqWoOz3BGOxnymlDJCNnrAbPEOC1YQggIgRgicRji4zGOgymzUT43Iwg8TRAI8+lZ8aptc/zsvJkyvze44csHd/lWUYXM6e6IFmRmFWS6kZjXlmTIqzsCmZlIJ0LMTGrDKZ6OHw3pB8T9fn91vb/eH25url+9vHv9+uXt9cEgwoECCyOlUUcyGDH384GqmRkmC3kzYxjUpkt3NPuvJw15/VegJDrvGcwREc9XfiuS8sRU7lBVBKfOyUp5325gl6+6ZEJP0/5vEe7VuSIWd9LWt0ZWZ8SKP8tPb8rlAfY9WrLQ+slDu0Pvf3p4tn4WSVe6v1qVl562fUel5GwHdzWtGs1eFkYXaquGoHSk7a/P5v+2J41medqTVZBaoX8B2lWEVKSIjXmirWqrxTJe2UaCiMggDFbqWEJACCGvFZkpO9cS4pCPAWMakJAiBTEDUyA2Ncs+eEQkajIosmXC7E5ZEUdNUSmOOsQxRk3IPpsnTCrmtaYpVEGIwbJLYiV0NDsgIRUm05TSSEGYyEyyESkSVhOtzfQXUmuIO4uRzQ2UVXbwC8XzFDifELYLQi/fHbSL8O7n0SSFXdrhbqXN1gk5zTsLRcLMeZKvDe5A3jdRUmb3avr3wFe9S6m+CN0ItH+p1HI3gM89sanQ+1mN/v9vasXOc/KXtDUX/E1g+2RqJWQtxp+RKir1c99Wi6uSZ6vgvzSd2NLqu4DxSfqvAAuEXSe7TjoxYTDAArbp2jMt7QuIUDYfz7M/GyHHfc3inYjAoiAFKRkHJkoaAr+4vnp5c3vVPSTFkNeZC4NbJtLL8G/hf/WTn14r0XSh8i0NytPA33Bw/dYwnDRuJa0Hw2toz2+rVTzQMIJX/0qpnP9MY64GHx7JZ7jwM9sQJsAUNAWfJBgbAQwSCUQ6xsFwfxrf3x9l13Nks6Q9NGFIsUs9JMdsICY2smLo5FwtCNsZvclU5wAH5m4SMZMkMrOUksKy5TSMGZTr6bpuzzIAo0YAj09pGDUpEQcimTZEpn3gGttmytRj8ri+vDXssDpjRrKP0rPKPc3dns3zSDFgkxdPlCswRgARslXcfGmz7PhbxyLSjcPT0zEeTw9v+T7w91dXVy9e3t5dX+933e3t9ZtXL25vb4mCEQsTpNP0pLMKNB+6akqRSCwpkYkJiDBdzpTQd7vklAo4njGdDGQreeqpvyLKlnxbWqwUmnL4k/WzOB+/VJWUE2TPcsyc0mJrs71v5rtWJXKpnG75LXlyCpx/6ev3tU1k0CqOawu/C1MX5rt/3gJ+7mC9hG4AOFdVidRKV17FTHWS6bHtJV0l+Dxv1DPcssUKvEpItfj07u9dhgXYqx1vq0IzClgKRA9Jm9PmBUZrOO0Ru1rQ48onZs4+aaa74zGpRlgETESCEJnmrbhkIwdGBzIIEYmQJnJ0AiOzmFsXEYrF8jACeelCqklhWrwaMBEzByRLRsh3BKee0hl4nMc3gZkNRGY6wFIQCqEjoiFFS4k+JRPQEIZHmkddCMEj1lFyXZXPsIpkn99Twgo9U/Zftdh6AOZ9zJaekZjCalt+qljl9KrCAkahrnzPx+/HeVBbIYyJINcNGTKdVBjw3azkzOemLZa5XFvLp5Uzj89ao9hSD2tJcZVCWjSuwlnQdTnnZfCw1uU2j2+oGqYtubfKVpfr9288taAhKnwKP89PXlS2lX9SelTp8nD7Crduev9tF8HVjNamzLy6ZMPFcUdDwxfEGhHtetnvZL/rhEekqMaqzFBmYQYRoQh7zo4nzsNdmlbARZYmouy8Wc2UQydmYSAh6jvcXh2udvLwEJkFyLb0GRYGEqwOc9fSle8dXdwgq7C6PnzbZFkNRNVu21bBiYe2/NwyMS0ZqpmlhXy1+OqbStpUrFqB7Z9b6VTy+LuOHsIKvLYtmvXqqX5SGNkUUAtkRsIAsTAMY0oGsJAajaoY0/unx+t4GxQAJcCM8nnVkDQQK1SIJDupI5DR6XScpz8DziqlSEgp5cspDIfwMj0mzcdepEyEfMyoqswSiKOqjhjH8ekYYyKjDhAyma1WoXMI9OnCDikRJRiz2GQWOhGAzGeklGkJN
LlhsgjjfOOGiGxaRS4G1I/UjNjxjPY5/2KAUrmCqGyICmYKEgJjSOnp3ce37z+IyGHXX98c3rx8eXd3d3W9vz5c3d3d3HZXnYxnymcmopSmiKNqODvjIGWmrpNam3HKR2Z4Ihcmy+a7grRYGU95sM3Y1kjA4jTJszq7ljxeaGnC55m2lNCluyd/tu5Z168/PZz+JM3XjyXn+/6aA7W8L/zYHglSc/ESa3LBF2/FQc5eNdoKgvJT1+44bUnVqh5zqRXHlawpYHvUtVKyuvvnhWmLEJqX6K1F0ypIVY+2JGmVp+oyuWm4LdgsTScMFMW9bc6/93oJEeUYErvdjojGcYwWoQmWWEyY8vm+WkyJx2Qd92y5Ie67LlAHM01TTGEAJUREaS6l0czybrFCNa+uYSl7V82QA0TuxmHGG02GFwAYJkQEExhpYkCImIVC2O120vVjivqgqtEY2SnL6hBUePYoagn4c1NbfJUw2q9LVoK/U4czJs+BbRb8S6a2fmJ5GaqKyLEUF74V4Ez/XvAWYDyvVWJhC6TVnJ4sVyHfeuk7cmGIt4DBp/j0makSTRUjr7ZyoadVtav1fFby0oa21cTnp0rGVhtbVbatPv5Mpvsbpi1Ifj6WCubbqqpJ5Oe05emkelO1bmaGhRjJDJ61igqELVIpbzqhTqQTI9Oko0aibkfzBvqcrShysHzDyEocIjWzZDaptcXlO5vBDGrGSUFEAdoz3+z7m/3+Lx/fMQcmS9kmx/I2pWRLu3YoWzWmIHz1ykzLoZV0aqtFQ0L5Z2vZtFWw5ZTncEdVaovYtlQs/7LCzCoAFagXJEn1suChnXlXH8os4wduhnOGOR9+JNWpCCy/UjIlM4jRaGzSKedYBpSSDRqHkMzGRCYAmAmGlNGSsony5A0hL87MBNPeaAYjx1IHE4GH4zGmONshZvdKZIYxjoUAUvaTTzgOp8dBkzFxp7kBcN6EQd7vnld6GWcA8svSfXHWpHklJMox5kaIyaItOb2QLgQAsSeVvEWb/2alCzZ1D5xXPRQQJg0gRjVLu/0hpXQ8pcywZNZ1XSfh4Tg+nuLbd/e7XXe1P9zc3Lx8dXd7e/vmivf7/eFq13Vdx1CBGRsFI0lkgZlDJ4TJFy8kjGPSGQGzwkE0uauu6cPmeICeQOdSi1VToV12DOA9HeU1YXXrLJNCZQWa6/R3Dot6xMw5LEExky3s5FnUT8Yyx+mq8njYPDZoXpP4N9WQe+6qqi2fdA49d0Gi+ffVwtvhv1tsKLqdNt+uG7XFDlwFeQU8nFfS6q+XQa20qnxt+VKeNiry8Pis6Mc/VBASZXmA6j1wDuNbpVUkV5B7IsR8ZO+Jx3eq5FRVXrvtiYZxKjByPcwcwhSWJ1O4sAbTwNR3suskiAgxE/Uh9H2fg5TABUjkOQQoEdG8mFFNxarCTMv2BYBhTONgMcZxSDGlREim0SIHgSVGAMBkxAAxsUpMzAjEApAlMQiBia/6Xb/fHa5vpNt9vH84PR0jiMERWnV/i+A9YldHzWdrZQi2l5GeSStFebWeUgoEwWLjqeKRqgnDipfCeQjqANy03MAqXeDZN2zOUGzdadoFu7QDUkk8IirRgyryq8qSU5TRsHmbLgzQT0teVrRUcc42P2wBVvGUf7Y1GwS/tdRy5Vaqavv5qeWOis4vQ9WK6NWpZBXaTA+tiTW2mfFv2PEKsFWwL7x/fmphXv3JjuZb7rgA+eVGn89HnkOJCFgRmBcaJY2CKMh7U9GsI6IgoeJlMggZsxh43pSkWeU1QxLOkaUU2ZiNiMiIKEVVBbEFod7oZr+7Pex75ggAmjtqAIGQFebtNVL17IltFV0XXrbyf+vN5Y2S8lxhuH2zPl+4N5UpWTWIvlRV+QU6L2KHmhXjas0V8D7n1h37lvgvQ2VmgIHFxSUwVahqYAEHJgZzMrCJhj4xqQQDjzGNYzqaBtJOhA83xoqUeIpOp5QP+xSqpinRdPBmzMyBxpRiSnFW9QuE4zhGTQab3ONRDqWAGCMzE9MwjomY+sAswxiHkQzBGKowwIiz0kjMkv3K5NhUhXvMLUbIMH0wZgTJ4cQMZBYBGLONqgDBnW1mDzQs5dRkIqRpcKcbe/NBPgBSU1IiVkpIMxLy/SEZYjJj6UOOS5eGU0rp4RQPu12MMcY0xuHxafz+7Qf+w5+Y+etXu1evXn351ZuXt3dXV1f7/W7XB+l3xxhVocbUsYGimaklQsi4yAmzyWU2vbVlxKoyf3j1Ij+cVdJGseB5QWVmxWyp0JY3ZMrJL/ay+lsBgKWjiHJ3sb0st6pnjONYgPSzbyYgbwFY6RO0jJKU6/EnfjP8qwsYFEQVv/Ae5yVOI83o8gvXkgConrvPcwQCM8smdi0nF5Pa9hMtvb/afIZZRhnPSO3ZXcF2VYmnijKgM+mdtZPcd0d+Rs7pEWZiq06MS/0xnhe01miEbt5Fac4js8JPWRaWskWwlkHx9Fa1Zcvp3NzZeObvvu/7vjezcRxztPHTw0Po5KoPV313ddi/uDocgnEcMj2oahdC13V937FpAiT04zhOcUGFzIzSOFPUwi9uSiml0ZhGTWNUI4iIiigrJRJiMrKkZBGMwMYMYiJYGhMTDn233+/2IRz63a7r+74/jQPDTsfj47v3Oow9yymOIl1FA36roiDfE4Dn7jJ2PnwCLfaMznGlqqFcrX+dAt0W2IJcgXEcfbvtKFcUYo3XU5oXfm1+OEFUOL1dOp5vcc9y1dtftERVpBOWkaZ8Q9UbxyMLmVByFpzUqFsKtCpnK4LK2FVV5QVwyVm6SZXv5XlJqO5agZcDFUcvyq4trnzfV7tZPWxR0WXx2H71ZhSVQKhwW6GlmLqU+cLDU/hrVXzl5Emi4FmdsxMvrltLnFaOYTHQtSYNTE6byrRSNjczxrEckUXBNXq7kFZHoRpTX5uXMxNP2YJ0C0orHK723eOtpRxfQ9UvojNd5bLTpT4Hf0u0WSSW6TJ3ITB6YTYVws3hcH3YcXbAMV8ZIqeAERGLlA7mqrL+wHP8ZzNTjUaY6UpY6KrbnQZ9Oj6xpp3IPtBgswM0CAGWAyEqIAv69Fj1yCwgefL2soKcQ2Y/cCU+c4WlUlt5X5BGTjksRfK+W8XaZVh9HOlq0CtC9VRXIPftFiKh2fs9M+cbIoVzK+GgS+OyKtKSmaWUvBO7Fk4sJ4I0r7uwFGiexqohK8j3CjkAppBXPGbGIJtvKDAHMyPDqDakyIZoNjw8Puw62V3dvLjC8aPqkGLc7XZpjCc+QbjrLJp2LELUiZRRKtjL3SzLhEK3+WXuVx+EoKcUY9TEakqqAIf379+pEbpuGJPa6ek4fPhwr3ih2fZKYWaTK7wcebDgky1f8KPsbUV4ig4elVhCJyLCMLPYSd/tO6CzpKfT6ePHe/RXC0waZ3Ka8TlF4W7HyyzludjMsoKSMPmZzzpVVuqUAzGr2TBGIiIWZgEwRgWCgSJAWedPCujv/3z/+z//sP/t71++uLu7u3t5d/P69evXL1/sD30vHYduVIpkLDtmRg5y5cXQgsiW4qw1
dySX/GJpkcdZ061JxoW8WFDefPRRiRIPqpltzRql8iJZcioioxJbZWWCpYinZu6v+u7H1XenGu/VZE6Z8/B4AVHqmdtljxByE3YFYQVYBWrGSSsQW0lX3reZ275c6KzviIfZmn0H/z77z/SEV8RohXPM/cRyOGg5AVyA39dW8OPxdpl6K3xWbFKayG/3+z3WgiJ0BNJEamwSiANzHygwdcICsYylKYAEGWkcT5O4YSPq8jaKqp7G4QzS2fxYNI3uGuq0oRsYQgYyEuJkMGAcTQdwuglXHKQTPuz6XZCO0CEFGzvl0HXc9XgaTCNFhbBA1HXf40qXFt1bhOF/thkw6ZOXiK3h7vpay2oh97+2I9sC1jK4b6UVGm1ffBeqZy/lfBxC3/RqPEYAxSva/HMhJCtSVLXKRBbbEmA1eTxUErWFrergEs4ZqkbuoVqmrmV4fmpF9Goln4WEf4nkpQeWQ1+EZCWyPis9h1y35pTFy2Y1heUasarkp8D6PyhVzIIlEjyT/oRUFHo/8VnFvS61AieX6AiwZBoNailzCs+XrGbn1TRvPcdEznN7vm9MRLAmdtQUnQLDaEnNNJoqw3bCh553QfQ4InTMXXZoSkRswlgxi1gVBbYM3VTN5heYVNdMQLdGyuZlWEXtF1jmOQO6lM/TVl013ZdYaM9ppervBdZeBd6XWu1p+Xu53dU8VStp+TPHMDCzbJmihOxlLa/nADwO42i2218JW3r8mGK0oOGwExFIjnowj45OvuvMCOASCIHm/QIjGCHfbTEzhSnM5kEWCSZGRgPiGOM4pHx0qIpj1JTC49Pp/vE0ah47pFz2jC3nNw5syOofgdPk/ZMgAcIIgbtAu92eNIUQ+i4wM5E93uvTI4YFchjnsOSLOMln33jLA6S5v3kUyJgMOF9LmU7jMUdBtKxu5ZvHuQgbDOe25HCbxniM+P7909sPj/9EdrU/XF3tf/2rXx4Ohxe319fXh32/k0B5bANzUOQ9vcxR04FbSsnmVem0NSUQlqxEljmpbP5VO4tn9miWXp5YCwp8nlJn2T5v2aDi/FKVRy7WmKSCcJXlfLZqJsYak1czt8/WsqJnQo8Nj1UvEbYgJJc89qpJq5SuOltlq3paAdamC8KlxUz70rfigW9fFoRg3i+vGvUY2BKjaMalhbCdUbYopyL4nLNa0tty4eq5g5mDyG63O7sDLidFSUk1L/+udv2+CwwjIxHpujDbUBgRYd7TCiGANKXJp9wUWGrmzbKXNrdsSRFTGpNlWUhEDBLiOA4dOkEM1DGZEEkQCTiQMGMXun2gQCZkgaRjYkrdfg/pHx6fEJOqkuQbiebxUAaoPbmq8I8la7TI97jdIsu2ztZw4HLyHFdEkxPrtOBomsRmBf+F1J5se8D8aWpppeLNT7bVSrBSQ/XJs0wr6D5Z+WqqWmz74ltsxcjq4HqZYPPuXgtkJdPajjyTBtrMtFTRPlnPKnhbOX13PJFjSY1wnbog6H5a2poanpmeQ5nPr/Y5/fJM6iu/ULZC2mUxsor5/+GpnRz3u26Xw9LmEwad+jgFjQVPod9m8wHCeXPc15wtI4qeSiARMDOYTGOKKY0IsIOEq76/2u/s4YN0nQilpDAwB4YiRY/k8neVHdjdoKnmi/zcjiw5k+9q+HyLvuAzLTXaT6XyLR7EPMtvqRZbjdpS8fP4+STlV4J6K0+V39ylpypbJXBWK1nMULRYw7hOxWnvdXJkQDlm/f3xdP/4lEC73Z7HI9vIgICIJ8vEslrK3vKSOw2ayJxBRCXUhDl7Q1UVYgmB+tALj4ZTHE+POkY9DkMyJKMx6pA0Wnw4nv5/3P1bsyQ5jiYIfgBIVbNz/BqZkZlVlbVdfZHpnumR+f9/ZJ/6caeruzqzMiPczzEzVRLAPFCVRqOqmR+PyNoVWYqLix1VKgmCuJEEgdMZJSH9AjPVQdWH9ceKlvJj8e1kEYpRxiEIGS0rw/j8dDgcDj+P8XJ6+ctU6LPkCfN1cN+mw3p4WNDvzZmHmdW9Gl8zQQLwZiy8fLS6cQPiROQvr2ldvmV2uOvrOR/O85//8v8ex/j++d3Hj+9/+Pz58+dPHz58OBwOoRK7V/S7o7iutkYDGXBzSN2SVGumd1p5awFsf7fsRLeGdddU+6M+b7mrmiPdicQ9/qmfd14i3dv2STuo9pytHdSW5zsY6ltr7JtOq1FTdg2FtosHvvIt6tr2/Wrv7qOlbb8DrLa8dRltcbXb6bYvXx0naN0Nqm9Vr/PYeND19tkiKdyIepenOr/3fMw6xO4+7+q089I6q+yOrp3NEkUmhlCdRgoCFyfPrJLTMIwfnp4/vXt/iMEtWzIPHFmcnMiZAwB1YwKJSGBQILpBha15XVkNYLO0XMGmQDS7160WJnMmOHkAhDAQRYAJUXgYQhzCIQsLAiO4BUIkGgcaBnaiYQiz6TxdUkrujrKDR9rhreOgtuw+3KL9Xn3fWzS2ggW3pH6v2bZw57K4/OjHsvDOHswPBvVgdMU84vXqfCUw1X17914vu8LwngiivZVblWYP4L83EZXlW5590E6Fs2ttd6SdPGnH+LiL3e7aXjoY3kJ7D9rHhjK/F8gH/bYnJJ1q+K5yD6RdCdZBckNLu3t5vwCg2y7+5uWumrtzB7ibvmv9b11NfGPZ6msiImboXX7ZpainGKMEYYjzmmSMzC2EAX71hxIicgJd02U5zNfduyZ3TjHEFW4lXoTE6KpqCrMAioGPMRxjWA8trFiJyzLAqJjv2NhpbWyCWioeauX21Xaw2Kjj9nlLqLsC/wEb0t6ibtdOaFvr/tz9tjNU2m/fohkfWBTtn9tqLSarWrkH/0bf7QCw4oeKNlwSCGKJQOIggwHiULhgjY40Zf3zT3/98vWHw7vxMAxMSppyzmLRWUGhZD0mUCBmeNe1wUlJ5BpYROG23CUyhwkPEgcZhkzu8zylfDnP52m6XObsDormZk4p2/kynWb4sJCr+xIAp4zW19XgihFeMixgPcpeIzMJcwhB00yODDdNIs9P4zDFQFQPBpZVMRGt3qctAbRzsQnG6cuSmBkiYR24l92d1dtrI43tuogtD53IQSIHYnMol2uNls0pZZ8sny7pLz+9DP/zz0/P/+Pjx4+fPn06HscAZ4evKWjW5iAiS0rA5fmaLc20XzIViHcTc7dA0+2Nu7Zaa4IQ0RoulpceV4O7Vr41evoeHwjoyodbhvRbH4a3a6YNk/cD76ptv733/261Ghu6HU77SX27yoJ+Edt92MH5vWU70t0pRiO5thJtK4bqnx3kKyHtuCBuBVyFhIja1Vc374/H1ULYgVHB21ppdHtRp7wqJB1CKPcGa+XauLuL29MwfHg6HmIQN6hJ4DFGWm7VcojRil3IkCDlorW7u2vJbwOykoqqGSO5W0mQQCTu2czhS1pZggtoHGOERIgYMzQyRqEh4H2MIhIJTMrwKHwYIgfJRMK4nM4vL1+SzuAhw42uuqcOym5vSe1ieFd5bydllYw7tlqdWb+z4H9j2TJskez3Xm3B3DJXW7rL/W2/nRjclZb
1bec6VZ9vxKN3BOaNZwczb+HZcuhjFO2KrFqh25lup6Yl/reUiqJuyLv9bn+/kSQeCMm3l8di/94n3b5qK8kfoPqxHHtQHszyLvx+xy7/ZoPtV9/E5i/m3L9t2aXMBxrne4s3K5aW5B4PvtP7zByEGQZjBgKzoDjVkZmBALBlt7JTJ8WHo2Q/W2yqZR8TEI4ASgRFs7K8Y4IsN9yYADenSBiZxxiEkJd4G8tFKbIl5kaF9p57V/uwk1G/oHRo7MTCY7OntrArJTpq36XMbZquqv27r7aN3JNgWxy2cNJmuds+vKGlBvNbYLb9PhjmtVmA0GcDJgfIBQyYQ0DLzVwnyu5fXk7znIERgICSKooRWy1tBzncMojWzO2uqkmXm5yqKkLOVO1ZAEQgYgey23yeXqfLz6eX03m6zPOUdJ4zcSCBgxV2uly+vp7nDD+Qe2miXf5dA9VccejshBKCFwDABKlZ6Y+HpzRPmtLJfAyvgwRTpeuclpUz1bJy9m2SM3ZqsxSui2svd7mZnAzscHJzELbSgWhZelt1Jt21yZfDPCWSbEbZFtPFePI8f3n9+evrP//3/0lEwd2XyXOgcSdo5UWZPDRnl9gQ+pajaLPuatFdX3XsWvqt96drna1m6hRht7Vfu+sw2ArTltk617J7o2tm4sr23phxXVTPtpEOP12FXSOmxeRah9EQbiuD2nmpvZTDtMf7Q+2I2uFv5U7HNluxsqXFjjA6A7E22J40duur7jetm14dRRERsHD7tt+uha0n4e5AuklpUd3V3y41u46wXuKn++qHmA9DfDqMT8PAbpryYeDDMD49PblmFgkhxhgkwGDline5OlyCIxf37pTSPM8sEUumVzMzOBsVb4IlqEDxOyiho5k8gMU9kA+MwOEQbTwgRD8SDyEMwgRlmDCIzHSKw1F1en39erqcs8EESq7OsYlk03E69ko3L+0nLfntMtT2YasaW9rAHsHvli2ctC4I8TZGfvD2fvs3aSduae/umvDeEHaV/RaATmLcE3TfhL922lXYbaojeOxZ3rtzXR3MqIH8gVmzBaAzmNoPO/HbVrjXxQMM4xalW0i6mm+hbWqswC38v6DcA+kBSreE529e4G1n+UHlf9OyFey7ZXfuvotTHpcuhkpBuwFM+65lLWCVI0II5agParQ639GyK1dO7dwMbqYKIUZJ1EYEX9qxNQ/hDWjE6x1CVqgvhwHu2RiIgQ9jHEeauWz2BWZ2g5mRM3AjvlaPniVAYIW/kllnQaEzyhu0b6egIuQxde2K3O1GFb4lT3abbfd8u8UbNry8ay/9MkKizQrwHvztJ7uo6Nr5Jj6va0IyApEDvEy9E4PcyYByfY4cmHPiGIY4UrqoqlseonDJq0DOIAbBlRxuzrEG+7mu892d48hr0jz3kjyFiYgt5GSv0/kvX37+6eU1abZi5ICZBFRWp+l0mc7TzASDlmtsWGZn/5wJWAKIEwY1JXJmKcs8U4bi6d27k/klZZ31cpmfDpkgT8d3f03z2k4xG5ZDezScu/AIGUDlPJAWYK6CxeC4hjsxLGs0X5d2uFlu7ulQImLHlOb6pwCFF4l9miYGlSNNIiby7G6qy4JwXfgtgoaIVLOu0S+ZuUCsqgRp4ejobFditrjeytlOQLSMWo+JWwvgSpcPqX+rVitULfC1wXoUiYbJb2dxGeA9k2KV7/2u2GOnKW7SbHSYpM19aADu+zZE56NcgTTr13Ll/21QihYt23ZqIx3earMdRVZ8dhjLOZeQoWhy+pWTikrZFVr3woc9zq2JwrolNmxmrS0diW4F9Hbg7atdqbrbQv2zNLU4i8ZYwttUMLwJNSkiz/Hw7ulpjAN7ItgY4zAMQuwiWDWrBLirubpZECmCpDSimnPOqsoS3V21GARln4LcXR1qWNIWEhFRIGYW10QcA2EM4RjDOPo4ZAnGyRklbaExg0GAurlqSmaXecrZiuNIUneBbJRihwfc8uYWzx2hbuZln/F3gwYV7D6er+30dfy+iNZmu6oBb0cIPG7/3l2CtscGeC+3ER4Q8z1x1P321YhsP6x+XF2178JY+1UrV9seO5Rib963o9i2D1RfpJ2v7o26fXhvaA8GXlUGHgqWXfi3gvGNZTubj598b9nOCO5PSn17j8a2wPyyUX8XMA96363fqddlKr9lQrRd1Nns1McvmIjKie1R0oNWti+Lv8lxkBhj671eojPmnCVIiSvj7mZQVQb7Gk299murhwlWPwhmLitG1Zzc2H1kcREhCHwI8fkwPh2Glwx3JYoiAlMzC8RF3lZUd2bMG7m+5bIHJIfNRGyft1+1tkE3dw/YYZcMaoWaLqgrxf7ZNriLhzre+nar+La90x2H/7bNOoRuA70bb+19F8K2UwVzUalEsrCUsZPDnATkS54+OEiM4EzzPKtqZJEYxYakEzOTuUGBQOREEBIhsJmGUDwEiYg5ALn6dhGtsNGVnQ1Imucp52xExCEy2JxCgJMAMNCc0+vr6/liHJv83t4klCYDiiiQShW2LM/qYo3dXTMya06WpqzJ3MHEmux0OkENeiMTCgpb36UVq0UzEt1mJbhOBy0fdmTgvrp1Xaea1rWhA6CaQtCdCA48H46+WJjZHcVGdNen47EcHxQjnJ0BIkg9IVwkwoKfEiF3HYmE63la5aWu3CNxv31Ctxb/trR+m4UO2ttlXUerSN9hSFtTtNEaWbRluU4WtGxQVymlnWrQbBmv3RyqjXSz2Emfbek4vH6y3AFrJOnqOru/PO7Rvv4IoTcoW9FQRUCtcK/lDshap/6/HeA9wsCtmKPbY7SuZmvod9hu4Vx+FN+F27ftVy3ApZTEEh1I3bcdtFts79b0ppSF3DiOwzAQka8rQHdX1bKEA0DMQ4iHIQ7DIGaBQoyRHfM8j2MsYqI4jppBs5pZtut6svQygGhN4tM4ylKJBOZc3MPd3YlJmMEugUWGA8kgwzGOhyhDVIa6mhDc1bKZK5jjEErgy8nSDFdNBpCQE0p+2kr/FRUtiu5x/bZ0jTSt7dcvO9C+OQx3RzdH3wtAZdAdweW+uz55MMzdu75E1Lnc1464ScvRse1bkNkySHUTrYgys4q3x2C/pX3sydWaVnHLLGhMkHvyZxekezJqC9Xuqw7IrruO2X8BTrrPf00L95rFdxLztlS0v3GMuxLefQ14/P93pSOJVk13FX5Zae2E6gnFRMiPYgHg1gBj5qen42EYBZnM2jvNS3rrYjth8RJSwGAhhLogpMUX7JqXsoVQVWfLAwtRYFoCiYa4ZEsiTe7GzAx2KJyFb3inDLD0VXe961u+CXjWQH5nbdZKmG3NrcvPVlxXvD2Yu11ivqdBtuqJmlhu3dsK/7a1rfzpxFTX467lhj151fZFjSPrtg7RzvJyW4iIQL4GkCEvuang7g4lF5TYdsRYjqTkcpnP57OZPR8OQ6BXvZiZ50wMD4uNF8CDUYBPq61rZiUr/bIKUC0BaHR9WYaT5pyyZjcO4UmeM3xKWadMFMw9myXT6ZK+vp4vF5QFobsvOelB7iUTIcjcGcTWrg
kBNnU3Apk7ubpZZlCS/PpyTvMMIATKSX/++et8vvz88wuOxxWfC26Lsl0xz+7a4riboBXLtwhvNxRuPYaWvmyxQ1o/2GI+zNP5+i2RcMkDSPM8m1nxCV1WBObuHgJHsqRlCAwqycjy7O5spoXmFG5M4EAhhboaXXAIMiLkrOwob9ydiUSYmW0q+cRA7uRGzgy4uwBEYCbigLLoN1e4UeLG4iFAHKTmOVMQ4cjCWo08Z2XJ7uLORASHTuwWQsie1ZLCzcmEwOJlaUcHZmYQzOEKzwxlMehFYMIwsJc7rhQMLJSs5Brzgv5FxEibhMPWYOgEEbkeq66et3VeW27nJSH1sgJdNQKtPVFZ0FeyKGfWzNdzcwCq1hpelU+A/vNW1uC+jOvstlqsCX5z695Wj/iLUF66KMEw1qD/pU1yR0kwWPL8VMCKVrj6Jzc8MF9SOdcWkeJOo6bmlLOtZEhVuxCJNSyx7PEASk5UeLRsoTjsGtt4y5wVLR3SShdlDVlmWSQQuXsuayGA6pFmOQj9ahhjOEYcI424SJqdWClkl1PCrBCIwAJSRBqF/9MzfhhPH0R4gECisDOZm6oex3iIHFxpztHB6jkbU4k5PGRFUiOJwXWavzKIswYlVoJxpqDEFnCypy+eL37yMAd2dWLlQHEQHoTG4MKvkfwIGhhClPMcXAIxIDBOMytxAvvx/U+vp78mmUCzsvDwFDjPRmHdePabo0Jt/ZJolSEol6FvbpusHgpa6bk1hjhc0zCgmRoDETPcmWU9LFV3D7x3t/lWJ9bAEry3idBQBRWPrCvBGBSKWyZ6bCl24adXhedo7tNTIVYCgHpy3sqTlk3odsPrtq8rH9UR1cS+aFabbZ3yv90Gu6oiq4Ld8amtCTaxskZ9vkHjDTwdnIuAXfclhRZTss3/uUSwcMCLA1DZ3iV2whKsbZk7Jmy6zkwFk1ZsCzTmqdEN3aJx4atmxBXSm1JF4k2etOtruzk5qRXqj/qqE8i191KNyzFOCUBePwTZHaqrBndH2Lt3/tvu6u+OAXdob/OVm1cPjvaV3LfFt2gpzx+cqHd4q7DtVm5PqmldqABYXN28JPSxetTWNO5EWP+R6k2PLTA3ZLyWLZ5bWm+XSSXGxahDEs9smRLDgiEYxNhcwEOOPNs8CMecwun1d4fxIKcoFojVbHI9MIcQs3u2JEbwBHcmZjARgxn57ARjnrNPs6VkREEIrGY5uTuF4EzJaTaAhmiRGYkhgeQp6HlKLyeCRvVxsmMmczgpmPwoF4BUfM1FwhzK7mNJsVtOfrA4/rEvF7f6jebt0nGXWtop8Ftnh5bRurzBtfI90WcbV9KWulqS83XDbgt/x3RtBb9Nplcbsb2YCO5eYnU2JLMQ6xZ+ZsYqA7HupBe01F31ziDcZcbysMqren6wfGUCEi63UMmNDUxGBgqKxftIDMERCQx6tWcZ6X/+/PIfbfrhmf31dRiyT7PQO8wSmEYZDyQj2SAaxL8mJiKRYr7qlOdpmuY8Dz6ShDgOIpJMppwdHCVe6OtF54u7xwNTxDxbOmGej0c6Xc4hjBfiP71efsLT6/s4OUmaDGiTmZWSPAmHsqgpgazhcFdzC1FKlsWcFSAz00v6OmVmDmG46JJpMOdgwzt2tgWNpWErRMFcbwBWA+lGh7RuYq3SKfr+OjW3nlBUIiwyZy2ZqK8TTUSAyy0lOwAwMaV5DiEuOzUgIjZ2VQ0tdRK8qtLyfWQ4CzMTlgzm5EuNBUpmIS/uusTEfM0zUerPZQ0KMC9WDjnYpSgSJSfkgikt32FZ7FYlTes2F1MgIrPVaPASp1UBJyInmGUzYwCmEgZ3I3c1Y2JbV8wKhZoTUQYsu5rBHCogENtyULzgn3FzWvig0BXmm1SHFbedaYvrHm2/rV7po2vfF5vsamNZ43K50FojnrAKBTRys/bSCs3rgG83sdqvdmXHFjPd6Lb1t3cs7ynydcj7oBZyb4V1h1t3Xw/9C/stQ2O6YowJ+yE+9vbztsK9ndD6/9UngYiIDuMhCGJgEYK4uylcvVkClUTAmqJ4lHA4BCEGmUgcgwxjEBYYyrYuCmGUm39Yjs5zSuo5e4kATgIKIbg2BuiCJWTzlFPJdlGeMy2hm5iJmQKRCES8KHJzj3EgoiXqsNeb0Q6zOae6sDczEuYgfnvd/I1lS4edSt5SYEdCW0W+VZltC/cMzV06f1Da9nfHcq/ZCk/L/ttxvQWGdmhvEVa7gD1ALzYW2C63bttpq+2KmgfwPKi2Jwr6T9r/b0fRGHDofRPYsX9As77HXfLeyQv/dlr65qxdK3wfef7/ptwb+zcR0nHB9zLjG0tVFh2ctOzG3kDSqad7mQJ3Af5eZkSxC8kN5uxLcAdjNzKY083ZPmAh8DjGEALZjSqE+zAMIYRFtoN8FZJlgboesfjqUGp5nsyyEKk72JLDOQiXBdvSaEVI8SwQUTaA2ZYGH01Wi5lWLD8oWzb/pvBp+7onV7uZeuMcPRAyu3UeyLct+XX6rh2Cbz4vP7qNttpUlLBrTT2Gc1dQ020pD1mIHXArOa7IATcGDL7YsgK2ciJBBIpDAJLlWTU5H+IQI45mlrKCTOFGBg5gBpzYmZmhZiV1Jo8hEiFYmHNyT0SEYVHuOedZTVWzqaqpw2GqBoCDuLtIVKJpTvOUUkoKVi+Hlr0xiftkUDz1lrE3XtZlA1RVG4ZaPSj3pqx9/mC6dyu0sDGjbLisnwDLIqXc+Cvrz/Xwi6joq11L3tcVRAtAWOxLYKE9W0PMrIVL2CDysjUmvppfpQk3EoE5+bIHVfpa/NKRNRbDHQIYIGvCjDW6FZxg5eYl3BkDBCAsMJXGyI1KFCwDFRc5OC+nkWLkcCpHkOYEBZaIPM6gNRqBL+FGmGY4A0xe0vK4u7uCQqFqXnxvXb3ci133qnenDQ2LrlspN7svvi7Yyq55+2pdIi6uobgVlA8UatUK3riOlK2gViuUPzcedP7gdydKHgOzC95W2naUbWuipBZUv91JvWWAa2Chq0pw4Jo65gbaIGJXDuzx2bG9XQ2AHUt0a9a08pfWbeaK6vYHiiNECBJjFAoCYrhpdlZHdjOOgAViIjVX0jwO8ePz4ekoxG5mkWkYBhGBO9HiQuzuqmaOsIYQUMs5ZwNIIjPndb7rcU0JD+CAmqnaNE0ppXKYSat/i4jAl40hERJxBsOzAywCd4XBr+EO3D2ndLlcpqnst1HRByIha3oLqXRzvaW0Vp1XbFcFWfFcS6X/WpPXXMzthH4XbG8pXbPbLjqF3ZZ2FC2bdDK6a6p+2BLbLx5aB9g9zm2hvTdA3JoprZ9/W+2boO6OaLfrtho3L6/SbFNt9WgAAKHgfGXqtR1/uKVRmeBurY6GcZ/2tg9/wTz+GxH293b6WEe8UYNgY5u+/cNvlsdNdX01c7d8W952Owi7SPg1LFn4YxVk62E4X7mGHGRO5kHkOIzH40hElrKull8xDoJcL9q04DmJm7mpgnyNdWGqKSdyODu5m5oSBw4iTAaA3HOBhJlJbpoteKna9t6oG5zsS4BudlrRg
cYAuCdgtxZU2w7uzFRtdrdOC9X26tAvo8zW0KqSvza71QL1q44vqLEkW+Drw+5tFcu7UHXatnbR/iglsAEgBUF9SRQPK54ITAAYQuQEdipEkczyPF/mec45jMw0xEhks7lEDkLCTkieCVbcylZJbst2hou62atl03mes2n1hst5xupZ46Zqi/NzGIaUkgFZ/fV0eb1Mmt3Y3K8hYbsf90rL7xUPi0NrE/+yNMVFA96ZxO2kdBq8bb/Yw1vTou7gdKOok7Wh5ys9tJ/UflvXFSIKvMo7wNaYQeA194N5yXuBss/EzK7Z4FRDp4DYUQ7tykrRb8wC8ihYvHWhgLqyw+GQxc1d4YD6KgShsq5Nl5WpwQFfT72tGx7JXC5VGomzu3AJepPViYiLq+DqWEQgcwXUjQAuRy1K5M5uWOo6ULyRPBORXQXZo1IRaqbd7Nb5wz5benXo3+XYzqro8vagEQT1k+3J4ba1FrBWlHS++A/guVdakbd928nTLcN0fzJdPStac7OyWtcLETGg13UjtfghorKTZcCagka2Y7wnOts5ul78WH35WhmK9UafM4SWWMnqULVspLDizE1wcTcgCn94Gn/76VOMJyIQeYgcYgnVhTVE2+rQ4l62utyhqsmcBUOMYNc5q2rZuFKHlg00h4LMkQ2naS4LQufQmstubuZmFCgyOy3HqyvOi2uAMIpXsPtpurxeznNSIiYRV3cDhTs08a3SzlErHzusdpOy/ap9/kDN436op2+S97Z+x1O77eySUwthJ/q3aqN7e4+5dol5C8+DrzqN0mksa+6IdvNVWKM7qt0OtoXqjWXLXF2ztJ5O3OjI2+Gs1GJV0JUWfdFB1/LgHPC21j6oLWBb5D8Y+OM5Wn7cV+3/pmWrZXCL7Qff/oK5/n4Av93sthdf7+RUnqq1mgXAbmLSu5P1K+eieGowmzOT+7I4bDbvSmIIFjpwfD4+jeMAzVNWXm2QqiuLFxk5ioOcY7EfDARXBZXoYq5WuJbKPn/pLIBgDFODw5lKoqJl4GUDWlVVYWK2LgiXPf87ZcUY3fLjdTr2Ku886UyIMuTt7bhdcVHLN+fxQanQdv2+kXQ7/n38VdtX+7yNrdAKGbebh/X/bglRxfU9T5nuGuS1l+LJVw5V3FFyTQFkRmAHnNQh5tkd7hD3IBYEoKyu2S0QXFgiuzAFhpAHwChbCfTCqmopO3mQAIFlt2zDMHiadZ6nlEiYIOQuoLlZj5VwDDAC82XOCOE8pS+vp8uUFMIc2AnW83Kr3dDYfi2ZtaumshrcjVlQly27tOp7djttFoSdy/SWHrak23671ea4JR5ardMW1HLgCSBIIPdyTwNkzgxmZuGpdGzmZgYlB7MDLA52OJb4BOWIjdzZ4eutZkc5PWZmymS8LLEW6LWmQSVzW/bAeN0GcydfNwmIFlcHAGolQ0nJRUK+Hg+QZ2Z2EvcSBIfVHU7kLkQCIhi7k0OKryDUwM7IMDUqlyeNyeBwcTIsR1helhXuO/eVHzBwvUBYl/Lb2yPbU7u2PGi/kmY3nZ3Ftm3q7VKvted27ZiN2O0hrIDtWg/U3IOqMNPmHlT7edtmgdChbU6WFVpHvTNTG19d8PcWAH38w+7PTlK3sFX8LK6nzCU5e31S64hBZDE11CgrzFmpOCM7qcFSgL87DL/58O43H55DuhDRYQgxhnLlhgUxRrdcbgWDjIlpCet8RSAtxu2av1U15zznPGtOTkqcFNl8nueUTVfX8CpBmKjaynAu3ETwZMoOYsJywRXJNKlN0zzPsyowMDO7ubrRt7KQPy5bWbZVYLgzI2jItW1kV+FRE2V3++o7n+9E962Kdktd22Ye8+bu27cw9QPCbj98i2So3VWqRjNSNPy+yORfZ9BvRUfpd1dNLhua20buNF6BvPHht+WuzrXZput77Twewr2x3Ku5bf+NaHzc/uOvtpTwTWhbsbw7ug7sN4L3y0bxNyx064TS3em6h5xfT+1dYWaim9i/i8EgJddfMcrBoBD4OEYB6Qrh+hVTiZq4nFfwkoGbHGbZlZmd2B2Wy/kGEyGbwUw1AeAYBmF31eSTkgCBjZlANWV2L1Tdl9tG95DR4rMqHSJqAzh/L67qpGwVxzf5bpdu/c7qq/twK+vqEL45lt1XbTtbADo1V0o9GOgqL4FSNjdZ2mHW4t86ANjaYKC5GOVkYGcGicMJgdlAqxS1kndC4aL5w7vnHz4+P48juRnUydQpO8wUVuKgiwtDRMB2NlU1X256F4OvREBZ0iabm6sISjKUPOWspgpVTbNOc4YRCZvD1F8u88vrNKsRorsLNUft93HblnoYSLeB37bGBhHx7YZph0m/I0Dqj10YNvK2v2HbUWDbezun7XNar5Ve3VxXYg6Bxd0NpGpOxsQSqPinZcoZy63x9WNjWwOdMpUEFmpq7sSyrAYJcBS/AmamlB3F6bTGciB2XM94fMnKUXyOzQmArnJH2oQ5xf2tnPoZfLm7rAxyN/WyT+zu5ObMLG6BnN2Cm0ADIMTHMYAkgy7ZzskuZgqwBwM7lUNLI5i70kIP0iK9mhHtZKx0u0Q0rQ8ronHn6JmX6C/esndHH1vR0FpmtYt2idgSVnng3lMwM7vbGpS/fOJAvY16MzQAW0eqFZgeti0VtoKydfSrFYpHePuwZYNKrAtOaLlF2gzzinNLWuLGFJqCLyvHFnVogtdrm57Ur9sWWO4ftqy1w+cF7SXGdL2OWw4fljix5OLEtO6ZICgRgfOcyN3yzDYdon9+d/z8fHweGNmHEEJZDELhThSY4UvDTLxu9hrg4BhEzYF5nudcdh8cQEoppTRnTdmURMnm7K/lgrZmcy8R4WwNxjsE4WU9WFbscDdBqSbFT0nhbpqzJrWUszlZOUkkFEdrNX8UqmKvtGSwJZiuWvu7EkD12t3WxO3CoJ27ezuj31u2/bbkvVu55etdvdJJ/Mc9tgzSfk4b2/17h7OFtvuzg7kdddfdg1FsSxViW2B2KpdLf+vLpZYt5ml5UqTcom3Ulup0s59adNNO5t97pp1j95DQoFuDYBf+XfTW+h0NdNTyuOW3l26WH5RdGLZEfo8Nvwntbv23APZdpaPYLZUWgX+r6L37tv65y8j0raXI41KusRjMyNwR3dVNnGmJVuKAErOQB6EYZA38t5wfFq8rIgoideVmBAHcXUHZXAhLsCWoqrpngFVd3bMaOSIrAIK5mXsoR5bOKLYRM8cYQwgSZjEGL07W94TedVxUTJGbJMPbm94dktvZ2X1S/u82+O6JU+xR2lvmqzu3uQdnlYT36HaXPO4JbeBGgXVC/gHY1V7yze4G3c8C0kn17tXSFJTKHsN6IaeYi4GDEmoEjmWNQPTM+sP7448fPwxBXLOROZNqZg4GU005SyI4lOGJigPievUGptlSSpbn03QhDgxyIcHiqXQ9a4HCzCxbVnWwiUuYFV9P08vlYs7ObIbiirU7Lmx0X0XdrhptA6dVbVUiePqe1Gqftx21q8SWKbbg1ZbuzXidvqo63ZdglR2c2MjzSrRhCbdIjjWxBDlg
Xq4FCjEJMYX6zXrnwh1sCFeycwUJmEAo0U+LCIvLntbN0nz1El6tR13XVLgyADvYr6S/hpvLbgjEJeOGwymbiDuBHApilhJZPzAGppExAAMQORwCD4GfD9HBM/Bymf/19ZxzVmMlYwoOMnIsXqOLYd2ifx35dVtllXHXyWjvvNXnW/XjtyZsPb9qJ6ybuXWmeb2seNMvbqVSneZ2I7xrypvzxisB7RFNR3ndj+3bbtS3qOiBbBFb26kwt3f8iKjkQsUNld8sFdyVQc5UjpHLPdKWSQg3s1AJbnc4LS/tVsC6AhyGYQlrGUK9iExEAU7shGBk5uXaBoE45zmys6Xg+TkOP7x/+ngcxfMgHKPEgYp7JjMPZW9FpEyTqcOdCOxmZsROzOaYc0rzYoxOKaVpmlJS1XJHNymd5unl9Tyrr1zOTlyuDsqawl61SMBCZzEKLQtRR7E83M2L6/hyuWXJbFP2fNz97tHMnVIp5Drj7ew3E9H+2ZE6N0lxvtlXN7PbOt8L//ZhJ21vqWjHnt6lrnsPtwBsGQe3Wu0B9bafP5BRW0ZohRtuWbjl63t4vld267cPuwq7XXTiDsDq91G9yAmAm18DJZUPHXUL6X5Z7sm/bUA9zB2KWoC7cXVjwaLg+6Z2G/w3Lfem9QEM32SrThe8pc3vLQ9g2LJM+6plEOZ9OHdbw/fDb2YGKxEPACxBc0sj7g5jRyAEloEkSlCd3Z1BJMI18t8S879D4xqI22CW3QlqqppSyubny7RqalM3FiEKRJSUEZyJCWZmaV7qX4PvkcFB5ERO3+KcDp+PkfPAlrhn0myNik6VVNLtZNQ9kDqls6X8epestZoeyNt7qNilPXfHBraWVnfaxM3wd2HeDrAFo/3RUvLynAAm2LIFvOyyeckegOVvMqJyXITPEn/37vjD02Egs5w4UnGnGuLoCrOcptnVxI08k6MgkoUB5Gme1sB1ZsZuTnAzMLG5mZqqxEF9BpaY5A4FxAggPqf05TS9XgwhErOrEy0o3c5CSw8delu6qlxfPem6t9vZvP6/7l+3Hd14nLpjtSe7eWkI+9FGw1bm3DOcrE0y39gJwUswhsre5paVBGmay/QLM3ORI1YiN5hZUiUzhpNwuZ+nqhBnlNAvcPcSaP4J670ivSJU3Qcpz7mFVQAM63w4w3zNsmYSGK6uCea+eJk6AJ5dBmeGOTmxsDAzjAP7QfgoOAgf2J8CP41hjCFwBNPFLECniSY29eL47krMzk4GlFWH0Rp1bhetW9nh7hKvc1mr8ZqHp502rAqmPVyqbbYhGdoGaxLByu0LdkvsyNVbtSOpLYGW2ezI0dfgLlsLpitvkXdtzYaRluAf7RFN2xrdFlvsthpI5ip2AXBVTLecI8Ql8afB3cgINQw6NWfOAECC7xlOy/z1d3UPrjNOq685iMjEyd3JndThjsDKZgEqpAP0feSPT8NxJNZ8OA5jHA7DGGOMEkQksjBRCZ9dZq0MlsulXUol20XZTnGCpny5XJYrHg4jBrEqpjm/TnPWYL5cFGlxraqes5RISzSAKLIMIVzyZMWNe9nQYhESJrokEoarWlnkMhE50/fYydepb4VvRwwd8r35CrfC+kH728YffPJdZQt/fd4SakM2/eedKO9U0YN+O47GKmq+i2G37usteW+ft5LHN9ZVLdSYbu1Y3o7/7bTSHTOrmwJejaKKorplVEGqwPiiTG4OAm//2HZo9y4QbofQAdxK/m+OrsXVtv7/98su/v9WgLVGydul8d+20HqMjCZbb3E/uXflGPtGW29nv734clHFy2UHZmKwkDgJPJODmINIMBWmKFRc6YgIviiyIkuSqcDhToEYcJay6+e6mFRkagZYnuf5kvTnr68SB2Y2y6Ek9PYhCl2SWRQPJNCk+fWSXy+X89mLejErAmQNpyd3B0z1DNOvbFj+fISNDb/sMk5Ffj0U7QTUmzD/cAHfbji2omx71aUTdNtyT2ZuRW7XTlvhsfjdNlKBbC8HtpC3sO32vhQOcC/3B1fPZnYCnBYzzZXJS54TAf14jD++O348jCOcHWOIQxTLqnPKCgMhgM0JTjAimuc0DAMTm9k8p8t8AUAiYxizmeacVJlNQnCnnDOG6E45WUrJUlZVIybirLik/HKeLxkylAuEXrxCtm7ND+ikNTCuaFzzhHfTsZ2gDofbT9AQdv28eMxtYaB1Wwe3rIFbGmh1bn3awkNrjJjteMNhGIvfQADJMILKjr8OQSBLtgkiIhIDldwjYRgGZoObmboROxGZiwM5zwaHcAiBQapK64LQyNbxCxGcl0uxUsD2khPOkFMZBtWhurO7TjOASGABwcnMNQN4zwfhGEJwMIQlMgC3eRQ+Dvw88EH8gPwU8eEYn58OxIeU7et8cQtZQzaxc77kJBKTUzJXy0SQ4uQK5TXWfxVhZWKWoDubCAo557L8Kz4MhW5KkA9vSlV7KSUiCiF0omTLq+uf1MqmjuxKCxVaKvuGd0oJ4tQ+oY2J2fVega+oWDKq3wZK7eRa/YTWKKNoLNFONjWaw31lPLPsZiWEFK3O3IGg6kQUYyw7nSJCQIwRRDnn7EpEJMy23FEkX+7urVft4HTNI9exayccS4Uy6SGEEIKZzfNcvgohDMPg683mevQa45CyajaSAJC7M5zBn56H08//6vnyw+en3/3w/BQxuD09j++iDMPAzJZTdgs8qNmcsoCUmUs6yhIzwBRq2RNLzDlP02xOQjHbshjkIPkyXaZMg0zqr9OctEQVJzLJbmxGEDjnnIMsa4l5nmeBDKKKczprwACmIETkyXLWS84nR1lzEi3HKWsumOteaaeY7/nWtxS4VU4dNRJRG/UXty7++0S+sQ+2CntL7S2oO0qxKZWXWyncAU+Nsdse77fbQB26tojqoOoGVXu02yvE3VdbdbXdy2hM4R12rjCjkQmVl9vtpCo5W0R17ewOs9bfaqyuMtGSgGXpsVGdi0uPlc2KBZLlAgCWeza+Bqe9ukhsYHHXNZlqO3x0K8WlgrN77sZ7j34qwndJ8V6ewHZa6c4Gx2OKLeWey3RtthPI7Vy0fXX9dvLzAa91/IKG3d4+li0ktd+W+3Ylz/J200v3eQ0wtMtBRRmJSDHgunZ2YW75rtas3YUQAWeGl/ieIGYmIwaFEEp8fk0XFvzm88f3z082/RRCKMZICQiRUoKEEKDqMA/CIQzlUgPUkpoQVDVPFwZJiMx8Pr/89euLubz/+IHJppyYYZ7HQZxiSsmyMdw8z3Oe5znnRV7FGI3IShqvnGLgqxffuv9VyakwYN178eWmzE3wkoqT7bxvBXhL/yU5MG4FDlZX0jp3W5apcmaXzqtk2G6ctd3VpurNl2IM1Mq7hIrbff8VP1fsmRk1hwStVL/Hv7VOK6jrJ23Xdby1x/qh+00e0bbfbC4kUhb2Vq5ukTMRGGTMYBbPk2U9Hg/vn5//899/+N0P70dPPtkwUJTBVS17yprNmAKTm2dVI1gIwQEHNPs0X17P55RnIuIgl8uFRGKMYRhUNScD0zAMr+pwvlwuBDocDul
0BrGRKPH/+unradbx6ZiIkylzcFCMUR+auF20nlY7X6dyFZDU7EGoqpczM5HOGCaiEEIxzMyuicTNrORJ9sZCRuP/XJ80lNYbGO2E1mYborq23PZS8hvX6a4XnULOudyXYykXqwggEJUE0L5GGWVyJiYWYy0atSxteb2yHJmMoExQc1Noudq8sJHTEqnKGYASSXZlLFnhBAS4kTtDNAEgEi9HimVFCgshwJTcWFUYQSgOkZnf68AxLD5sgYYhihCDY/TnKM8HOgYcZXyO/O44HIZodpw0jzMkuJMmS8nU3C55Ih/JiSHMTswlYFO17NsZaqVSWwCEQbr6VRr6LSGujXx7O/9WoV6fVDpoK99Tfl2Fe+WbxkQnu1u9i0Z2bAUKNWYiNpILze07L1ptqZOkCIIQqDnYZDcFloCZZiGE56en4/FY8p2klHPOJf8sUO4Q0XqtiIHiekXAeoO1GX4nuzuElCdl2c/MsnpyVjO6Egwzl2y8IoHA2VxVhUjYR9L59ev7AT98ev93v3n/4/vjuzEcBzlGHscxhECO3MTRJyIrd5+YQYoSfBdmlaWYwYsjfcGPunkmdTNiVTtf0nlOs5a8F1R8jMrcmJkCgZZowHAyg6rzOl18zeBcXfYrmYGJvOz12s3JyhZv2BAnEeGhubzzcM+qw62WfdzONyn836LsMl0VDmgo7QY5G2Oabhefu720HNoRc/fhdo5atbE1aHbn9AHwXdmVV13Z3bbcgnR9zqt16HC6gkpEDDJ4O4rSQ9FxDCKGEDtfDcFledAIrhrdeh3X4n+Rc27PCVew+2ME7MmQLU5qeYyct5RfSd47GP43PrJ70MVbun4srn8lYMAib2qzVby3huO988MOtu7JhjLX39y77bi7wtzBTg4NREMIYwzx2hiXPU53J1o1KSiwVA0FILsJR82zu0cJ0zS9fPn6OmUDg+IpJbtMg/AglByDmXmYLQvcyRieLSXVnC3nZXXnrOSyelTt76K22n8jge6J/7vlFzDIVul4s7CsEGITSQhvEFn3rK92Tlvh2b1qrcH2/2osteihzRpgp/i1ctuL3wYU9esJ7U2D3pRdCN3YgAwiZyIigZegMjFM01ncnseBEZD10zD83W8+/eNv3j8fDwMBZq7QZd+ECHC1ZLNlZ2YCQgiBeDyOKaWUpiknM4NTNnPN6s7mZiUZXImjwGbQDHeEEInIdEFRUrs4pmwX0+ySgAw4J4DLjZlu6gtydhHaEsYVMxsV3M1pi7Q6m7RXtvK2/GgPcnHDTepLYIsKgFMJl9HIq/p/bacdyHY3wddtkUCu64awwxzwkuR0CTZTNllpuXvBzPX+X/mGQeTm7tmUiVBiyriTO7MLwZbU32wMCK0uziXOcQhMRGTmqmowuA+Wl+64GNkAiJyHGNgIrgxEQhSOgUXkcBEmYgGRS+TjMYyjxBAOgcaIp4GeohwCxoBjDCHIPFMMwhIg0XmYdZzSnLPmNKsTe4CURN7sBLOr+VW5uhVw2HB4u+tfataDRNzKl4750fDkPVLDfd2zu2PUUCawkRHYk3T3BM2uPqukT41pWxmsxVjT8g1M9beqltzrBSFlBRhCKOs6Vb1cLlbdYt0APD8/f/r06fnpaRxHVVXNMUYyn6bpZ/0KdzI3GEBuxMxYsnmuA9/MRYeZXeFeF/bFJigbk/VJbcdsyaJjOstycgERIRhbRpqCnn/z/vjvfvfpNx+eniJGoUPkGERo8XljZinmJxaxW5FDjbxQk1JTREx1AY8JhlzSdTJfsr5cpvOUU0U4CTED9UZwiZ4EYjKzlBI7QViY4+LtusQbbh1klkFBSh5ao+t1l46KHhtG31u+2QVuBGi/H7xbvyu+WQLdVcDfgvObbwtC671T936HooN/19pAg9hd1t4Fpq3cSbl24wYPh7+r2Lbw487E7Y53F/j21RUti1ABbmXjOI7WPKTVrcA1tVKLiHgJn3HdVaUKGGDlxK+sCoyIyvl7cWFYUn5uEbJ90s3jr6H/B+XtVPqgZivG8QYCeHu5Rydbldp+8rjBB82+pWw+fLQJUvR59Rar+v0BP7YnSO3/rZvPTbFy4oISVNoIbg5AQF4CNJgL4TDEIUa4iURmcjUFVJXcqreRCJfzJV8NPjNLSXOaTRNpmuZpni85Y6ZIcTy9XM75NB7C8yEcDJMiqJmDyDIBlqxRAGXb0aleGrd1q6WzYq+8tv7/N1+x97KitUm2mPd1LdQ20n6OhuZbw2a3bKXxPSnXVtg1gboWyg/DDngPim/sVaz8ZWtqgVqnJcKO6e5BSI6S0xslmQkzipw0E0aA+3xize+H8MfP7//p97/54b0wA25gMHORycSiZcs+mbItMUoENHAY+ZLmeU7F6ptzMsvMDCmUzELCgUVc3cwsqboSnJlYPRORhMgU8lkzyDgqyEkIDuKy7qjHZBUzfn+OOwNgqXhnUbCgbjPd3XnSDSU0J3i1I99srNQutnS7O33t/3U2K5BVKLUTXSgklCNLAMXaNijICeTl1NKMvKzKQTAChIO5FmW5WKVm5k7uEgQogsmgVm7f5bg6dDkgXFJOuDszMSuzm7qZmhvcQTjCmQjizMJBmAIzMy2xrgKEyQNMyBlOSAMjhBAHiTEMIz898fEpjBFDpMg+CoJgYApMDJTbq4YcWY/BPhwlfRymNCadUjLKfl78CuHLSnyN7VgupJnhVsR300xE6ssV2Hbi6/pwz/TZEUx+u51zS6DfcLXatNa65LXCeom817TT2FY7ZSvrr3BuRXBbp/y54jDjlljL7xgjiidkzqXZ4ntZf7j7MAzjOB6P4yEOT09PzGyqOedpmuZ5iixxPIQh5JzJfDkWLOBdd8IarLKUEEJ7SLvxr2jHpZrrhIYQYoxFO7p7WcG6e0qpeI26O4hyzk7OITLDU/Z5Qn75/cfDP/zw/nefnp4HCrBBaBAWuOqyoRCYl8SsIOZFMdA6Qe4lsLhT2aqzhbeLR+7qIKQURJTzlM5zSubGQhwKI1O5PMkgkAibqa/cnLOREg+BAsyIidRZczl0dTVzA5XzlTXGz4Iov15QviGd7zd/t59Qu/d1W1r525Ii9sTlrhnxoM2/VdkaLrjlkcdmx+7n3VetzNlq9F0R5L6vkNr698BrFU9b7g2k6333eas72x+t69ftABcflOZ/AAhCRqzqZIR1+4aIbF5vpbsToXjUm5lgcR+saKCy53oVxQSBu5sj51mGgdroMjX7CPVof1w6TfHN+o/Lls5/ZTu/uJF7FPtgjB15vBEbtdm/LcM20Dbybf2zck3LIHc+R/28g3MXYLNsJWhzoUYIM5GKiCCwQmEgpsDCbjonEWawIi9UvWiHoTZYnaLd3Y1yTkRkaX59+Qq1KCE5fj7NFI/nbHPOB3cRuWRn8qAmgxAxWbZ10zPEKDfbgksEt3W3cifdzm5Z6/xamr8nz1ulgD3RUZ/Xg7LtnHaNPAaj7WhrJW5xslVbXVP3xnhPkAIo/nqtFG23yba0151EVQxsYS6/B4i7uzmRl1QCABRm+fJ8iNHNXtJB8E+//fC///
F3f/jhfcQLlch0YSDmtPhNJi3p7NxgVu7NmlE2JuSk2czU0uVyuVxOFOR4PLKTOTFQfLQdTKrJUl5adLjmnAlyOByIx3/5+rOSuAQHA4SS98DVndsTFVpXa6oKvqGTB3y6qy7rmsg2c7Qs5m5Na18Zalf/dmdI7Xy1gF27vqW0B4TXknop7ZZWKHlL1E1VSwQVEWZiywpeeINBS7onU0KAUwnaxgCxG8gJh8MhhEBc7lClcrDjul5oKZFgnNw9m5shRhEjELG5GBFRCMLMH2LBKhMH5pKchIgAVyYP8EgeYEzO7kz+7kOIMR4OYTzEwyDjgPFAY+QxBkJmL/lVKTCDCEQsHtzNcwj2JDAMl/mgRjnrzxfSi8/uashwLY7RNzlzqLLQVlKUH7IusGv98qO9Y3Brqt6VGrsSpNMrHflSs2OBZcHWt9DRzQPh8gvKtsFW7BIRkbRoaWWouzPzMAzl5K08zDkPw1CWXkOMMQozB+J5nnNKX79+fX19zdPk7h8+fBiGYQixshMxyGF0RQiuN45u7M4HCqwbC6/3kXLOOWdZi5kNw1Cu/6WULpfLNE2qyiLlrh0R5XlKl5cn1h8+PP3T33/+3fvhh2cZyYVDHMYQxIk8OwNh9UeVcuzmPAPkMDcv2R/WY2cP7O7ZTFWTqWet9zQMzswwTilN82xO5mQMI3Z3XncHRYIQG5TWTIOmMDZ3J0jppuyTYjlzobKQvMqwsiHkAN4aZfSbVNfOy42i3Zup3aZ+mY24C9iv5JHHxoo3m3ZdhdbI2DbSPWkx9tj6rHW2NlPb4y72WsAeVP4m5nfb3x1R97sOc31ysx5e4Gn+5DUxzCBBRJSut3xDCGGIWD3PSyN6o2WBknHFqETDKhdGmMt8GRGVLU6iq9/7YwxX/Gwtj3t4eEvpPqnt+/cfyHTE9ispvx3dL2DJe/W3+PylMO6Xpv2OJpf5LQutWvkxAJVfdnV6ZwAU2lmIzRkuAiEi9mUnLzATzFRzmpIlEVmSxKtl18XXwJkY7uXssFyxY7CLCBObCDvmPMM8KavBgsxGr8mM8zDk56yBLAZ+HwKTkzqM3Z2FBsdQRFaDK1q2zsmbPHjU7Dj7dZV1I5F+vemxazK9nR46a2S3tQcssLXoHtd/DHZnjWxB2hp+O+O5w7/1+TeNwI4g2/8Hogx3gFCOhx1k7JnFyaZg8/sn/NOPn//Pf//Hf/rdx+eI0aWc4zjznO2SpjkbuYqIUspafLiW9GdZKOUppVRWEJd0cffD4WAKc49MSwAbEiYqR9SmpBlCQZgYJuIxjBfj0/k8zTm7G1G5NGieyZR5IOBqIq6l7Ok3dHt3KqlkTb+dyjopIoLbA6GrNU43n+xO3T0Cvppat3YCrUuSe4LFlsRyvSl1T7mHZWPJMpZTO2bhcqeCQcUSLGqwdMDqVCMim8MswACK7hGIHEiExpEBd1XVC+Vyr0nd3D2ZpuTq4KxEKA5ygSXGMIYxhHC0jGWXlcvpQ1mWUVkBugXXQTAKgnBkef9hiFGOx/F4iEE8sAZJYyyKvqBQ4ORg4gAiAZOrOQVVFz0O/uEo0zxcJjWjOasqa13S800C62ph7NpnpajnrVcnVYeltdTD+jaMdUsHu/s63WS307xl7Af2HG4FQfvVY1m2JdM2rRBul8G79esO61aGMvM4jsfj8enpqR4Yvry81PPVy+Xy8jLP8+xZ//rXL0EKYjmI3LS2xq0hh5eboLduXUUtEVEJ4v4W5VEFR2ENM5umqeysPD09DcNAROWEcI2Ccw3iKmFgZtU0zecA/e3nd//+d59+/2H4POi7iKGkdYqDMqesh3GsVxNLHlV3QzmmMC3ArgCX3ZZyGX1ZIuZ1TZjKThzCrPk8TUnNiNW9xIVzd0PJ2wlnByAi5Uy0XLUkkrJ0Lws/BhELhNy0GinMRkRQd3Jyqpe4vkk27cO32AP3aBXfsgiJaNvBN7X11kR7A4w7vXSKfH3YMwvfRjLY9ltpr4NwdyHR3kfvWKz9qtVhLcPusu0uYC14tbVuvF1Tu5jvpnVrJ90zbnaNsKqtOwlD5ksyXABrgBkiGkIsRJvS4kpaZ2hpsITOci2XdYkIy61jK/fbAYVfM2Huenzsjv1BhY4Cv7f8Mortyq8BAPe3MDrYHpBHN7+74HUk/WsAvtdF+2ftrrUmd7/dcuie6uy5pr6i6prkzM4MVlUFjI3FyQ1qOs+W5ku6iIhZzpqg5q6DxrU7IiJfnDwJ8KIXkBPcn54OgH19vSQP49P7//Xlcsk2ZRjrOOsl2yicy1WCwGBiDkYWWYK55CW+xXVoa8jq4qLyQCC02PCy5/KLyi4htbKufXVPwuBWpBDdROzrWngAzLb9XeLfwllfbZcQlbq6P7efd011wDwW6a0iaOFs/29LLLkAijMzctkAFrZIwDQdRvynP/7h//p3f//vPn/8yC564WAI7qDZdDJNRAjs4KQ2Z53STEZwUiU4U8gJl/Pl9XQ6qWqM4enpCUxJ8xhGEZEwCAtzOVTIZjCDO4nEobiGqc45fX2dX86Xc56zwaXkyHCCl5CCNeoaVrNzwS0BGznMt+H6V018na+bpsrLO/TmezRwTztvhU/5v0YfrZCX/0vQDdxwFrn71gAqIyj7WV0vzBwqQMwsoeynOszLEc3iMm5LokAmKkFmlgNDZHYIQcg9ZXcnD8MYjsMwjrGY6WqXrJrzcg85ZZvnubgCFikVKEQJZfuWiEgCUBLxOEBGTgR2ChLFTNhGDqNgCDwIMfMwaox8ONjhYJEMlgK7kAoPzIEg7qJGbmIgOEc2ArGbuBn7IDgOeBrkeeTXyYfJZ4igWMsER6AFy5UC6pbzln8AkN/Ed2qDcLbqpPVS2JViLVFuH+JW7mytopbIHpNg18s3FXBLVZVCOqna0tnNtTNA9RqFr4Wn0FvO+Xw+z/Ps6yFY+Xy5sWBLCC92vHt3hLvqEnuldCoixN4Wcy8xsW/Hte/Wsp2LTjgSkZnWcU3TVCAPIbx7967CXBCynhzmIGzwnPMhht99/vxPf/j0dx+HT5KeaDqIDwIWGBRGzDzEw8rGleScl4OOFWMwdxBJURRXBmZ293J0mdVKCOaU0nmazBwkWTU3LrstLfEanMNLItErokxK7CiHCAHI4mImQkJ1j7wQM5UVeEcAVZ/tEtjOiq2ptp2jjtTrj85FZ/vjnp6+1/suZ+3VvNvI/U/63ZNWGnS9b8Hu8Fl1zy4ecEvVHWzdvHRtdtB2H94baS8P7xhGbS+d/usa3xV0D+BpNetSx9yWfJnqrmZZNZVsbICRozys34YgIlJilcW4eCuoLqBOl/T6+roavmvvvu7RPlxvY0PYu/f78f2lw0CDoG/X3xZalyvdKN7ezr1536VGbCi8Pvwmw/otwr+3bNq/uyxvO+oY542a9AGR1yIiEAIvgf/IaM27y2YZDCGG2zzPl8tlPowDJXc3y6puOZnlYQo55xACRIjImapimuc5Z7N5ZsuR+Xg4JMV00emS/vrzl9N5ShCb/TyllM0OwUAppcIJHgKTGQvNubWGzcyJ6y7KZ
kG13b/b39r7xaVjqK3ioGYlv0V+O6FbY6kBe7/sKhq/vUqDDftvW6A7+eK7rulb2x90X3W2AOwCdo8+WzaUnARwgsEVJbGJsyAyfXz3/B9+++m//rt/+H/98OGD6yFPA/wCImgCZjUFI0S4q+rLy8/T+WJzIgiMp0uak5v5bK/FhgGZhCAxiERVBwlLFBFaEpLTlPQyz+6s2Z1MRVPSOWfzy88v5/N0mbM5sRMTg9yIlgVhh/br2G8neUskVzsQN7TUCsnqftkibXVY2zlJottJaYmhbbydi+5HKbwXH+jawaZ0kBR7lYhCIDY3LC5qRESqWVUPh0NJJLhaGIvnJOolSCt5fUEMAgKTuC2OwEEjjQEM9iGMqqoSspvBVXUKnFJ6OjybmauVbVcGkbmpZqFlFGXFVW6uEqLwwHSMfIg8CgIRI8Pc+EJMxBykhGdWIQqRhlAytQa3mLLnzOooPnQgCQgKIQILHcfheLAYchATkQjKzmQCz1jzRmCVfVhvfVgTXr/6YRPRYTws50IihbJLCIKSXqJOXmu30d5WYusFfvv2Zl63jL0rQVo5sv3ku8oNY6ylNlhLqxjK/9u9N2oW2CmlkjqiQlhOyVpEVSZhv0ELrSGbl2SMUHcnK+0QEYUQ5nUHZXcsHUi7pYxrmuZ6DFh8z+Z5LmvCeZ6r71mdRDN295STqv7w+cM//bs//t2nQ0xf3z0fjoYBs8DN1bIjcIwj1R1oXxJMiTCXqIblZM8zmkkUkfIrBM95ocycs5U0ECjrQ4WDhT2bFWekskPBTnYVIuZu5IbCxjAzdSvr67IBLIDRcmFxcWotWPErijpMPjCPHpcHFt6uJbGrRCtpbt9+Lzy/vrTM3iqALdIqeJ36b79FM5AHinzbe9d+i40Onnu2y70nnRy4h4F7pZNg9yine1s+oo3N18qossFRtuHKLd/ysLz1ZrOvOKsPw3A4HMZx5Bjev38+Ho/DMABQ9cvl8pd//YmZX19PWKh0NQJsx5rsJOQ9Od8O9heL5V104Ttb881yupLKL4CqgeJNOxTYoOhe+o3uROXXwLZbGjx0qNhRoPdmrSOD9kfredQWZvblPsx6ucvhJUibkRcLRjGnyzzFeZ4xrr2UWwNpnmNIKYUQolzNSjNLKU3TFMaje8yXeU7zOAzv37//6/TT//iX//nzq11mh5gZ5qzZlwONpDmaDEGIArFnEJGuOm6R0ubGDSpuRdbOxseWW3992VJsKzOpWX929OxN2aWob1L+VrDjlsh9b7XZCuSONmizWt4i6h7q2rXKdggtZlr50w2EmnRiLWAA2BQc1nWnazlWIh8C/9Mf/+H//I//+I/vj+N8oTQfmN4NYYIRMzmVe2YGzCmdTqeff/7ZchanQxRzn1L++evrNM1zenn//t3x+YnIY4zVO6wY0mbmSywZv1wu5/PZ9CmlBE8wPp/P2dyI53mep5yzKgTs7iVpirlZycLW3Zz0ZkG4i9h21tx3Qp1XLNUFYT14tNtb4p3W3jbU0kDtvRJtjWJYH9azvm/K7fZthwFeI1YEpbmkAMzqWRdHhZJXp4xNqASddxSfGVcQwdgIjqDkSmBypBwDHchAiaeJ6Hw8jIcY5Dh4zmRpJHVLlvJzUCFBPmmiXC4XahH9FEI4WyIiYnKfC4cxiBnP4RgCRxYhEg4xsMjBoRIQQhh5FBrHIQicyYnIIaDAxEZGyIRpYGLm1/ysKcfh8Pz+aZpf8+trdP80hvMhpYteXs86q1MUkQvCrBlcVhYOLPlXHVA3Kcc4DlIQkRCHEAKzeyh2uQgTgRkiMYQwDNZNZJ3OexNWz5qKib+wqObnD+/KAXFZJxCVbTxZ7vg6r8TG7q6aCq0wE0BmiwtJob3ypJIOEXnj6Vxp18wqc9oauLb8bvMr1sr1f1oXzI2L9tVC8mWh6IX5vVkHLpoMSvm6U6LmbgRQdg+mIQSzFENgkFs6RA5k5pScICLjqHNWA5NkJeLB3cstWXflxRzcN7KX27PMhV8LPGY2ax4iAV7OKgtg5a7g4XCAOSwTeRAR4BDZJPyF2NJpOP/8h4/yf/3x8x/ff33ir0/vBzPjwwHhXdZk6RJNB5kGnhP/IIHd3VTdNbJIcHI9jGTG2dQz1JFL8AD4WXMRkDZbnrLP8Bxdg4Nd5HVOX6d5pmHCnHSiITzPpFrcLBwEQyZRDuHIUdyRc87JHH6IwiOCjDKmbAoTAuCAD+QhMOVskSywpvkE8zAmZ8uXAy+qohWrD9cB+495TURZqaWs+bEu8+pKbylF7NL1bbGlqpRspX/dzaFmxbUVqdio0q0ZQcTANWpUQ0g3ecbqUN0V8JX8b46UOwysn/cuiBUbu/U53HguLF9RDcZZ1+7LUqFtp4OhRU4HQ4clv90Rb/HA7ZFL+wktT7Q+cANAa5qmelkXzbFna5qsvazt+5rXBxA4lInKHqWamxuF4JFJnYqLEbMDPE2JGUOIPIzH4/H9h+d3796VFWCReMdhLPeB53m+nKcvLy/pPB3HkdyTFrHsKKtKgpc7GC0W/Yry9vprJeB7C4PdGal461B6Mx0VH4Ue+pgo+xsirWWAW2LbwnALD/VHnLW7VWcuQJo7nEtG0zVjW7sR3qqhLSd2f7ZfrQpuyYvbsmrlymr49oyMjv4r+uq8NeMjlE26Difd2c6NHl93pVbJVFpWWa2qYuP6ciLoNjzBXeCSATLA3DVHh2aJEFO72OyGMPx38z99ff0veD4MIZIGBgejPKeUTi8vAokcL+cLwZ6PT/PLl8vLy+FwuKiRm7OQvLvY8NOE/3ma/0dKf4J+Hc2gI+eDkLsfgwzmkV8FbDSaRHLXdKE8He3rE53GgCQ2s2QHyMU06pz5uAq6BWUbnFyFYYvFjiw7udTivPp7V6lb9twrMVTjpFrnaKRZR0K45bKtAd3t3X+z2lY+t0Nr2+n6rWR/Q9iO5R8A8xJmFnZNt9N+uzvG0sswDK3L1ZVfyi0PWvz+FpvI4G5Likhh5nJ/B2bG9s7AmWkOyJSJ9Z3YB+h//f2P//Xzh39PeH86D4wwHJz5JYhOM4GneTqfXg0UxwFzOn99IY4KOc/28ylrouni8zxYDmMcYYFsOD6PxycZBpKQBQa2KSW1kRBNZZ78PFFOR/rpT0/D8GL4v1/PZz7m8fhl0n/N/hVnDybqrhlSfBrZKECvuwOtpVqvdK2ktoSW9BLkb73otGCelrh93fJvibR7S8ksYGEzYy5GePFMqcQQOtFXqbdSV9tFSy1oZGB7Qkjr8QkRlYOBpeXifWmmZsWmrRl6s5dQgjl09Fd/FHlNzQkY1h07J8C5bA8VwJhEPZlBFQpP4oOWa108TZNbJrIYWcIACaYKg5pDiCBk5ELuSiW7N5ZFzjiODOScA9PhcDgMIQSJgZlZCHFYkqqzsIgMQ4wSCpKYy8qojEXLJbdqZBzHOGHmesdxKTgcDodDHkfjlFyhqk4kEgtBdIVbTenu7uaWcwZz8iUlfZFZ5XeMUW7vudX/x3HsJEspxW2yzmXNJvTh0/vhOF4uFxE5Ho+FoE+nE4Bs
llLSvMxaWRAy38jTSl7tDtBtR7GFxNv4JWWkt0d/dcHZyZoSDKYs8+rylZtD81seuGruUpbdIKjQ1au5CKvShWq+HsyCYozjOA7DkFIqQT6LZVBuxAKcux0vv9m26eSyE1pQK+SGEmOLCiKA9TjO7Hw+Px2OcRhUk7uX03Z3Fzcmevfh3e9/+/Th/XMMLOwEC5HJYSmbOYEh5LBsIFmvmzLcWUDl9v56CrGuY1fnnCVGl1pbHJaSzuwp58uc05zVYSA3t8LmshhPqlqYhZnJHEskgxVRizRxpmrOGzOIWTIxFrdYKUGBIQbA8pZfHpStAute+catsf2k0nNNid5Wbuu0hNdW2wVm9+09mDvOwq2+7yq3ddpqv760HbVdV0bb/arjgntM0SLzF4K31387DYvau7NJ/00s3QC829HSyjUBdLEjP3z48MMPP/z444/DMIyHGGMs6q+Uf/mXfzmdTqfTaZrSPM9TSmYgYVNfZR3RsiR2gNm0xFtEIzArBeLNCHw7Vdyb38ct3Hu7Be8x/bwRvFpsjX3fNuh3tmNanuoKbTZ6vguY7XRse7/XL/Z47XGn/VebL67ct0YRxxqmqAT0c3c4SmIfOFR1nrOZnWIAjENZli/bKEm93L0vXc0ym647vbq4LzGF2XC5XF5eXr5+/erDcYVlvXRh5DBjU1XkbEzkbqr10gRuNmV61+dfNkEdQnArxB601gquTna1/3dKoWv815RfKcZtzQ9+j+a7561y3CrK7sPafjsjy28vm1YEGIiWP4lKEHIi4hJbudRyz4RkiUzCEAOYYR+H8Q/P7//+tz/+8P7jYRzFMsGckeFuGsehHBsMcZxyOp+ml9N5miaHVMBUXS0BzpHjIMMYh8MwjmOMxIFYvBB3Np/TbJY0S04+XTRnQ0oGzyj5C3ia0teX17/8/NKn64CY53IKgL29zu7JloS2oomai2C7H2Ld9MUdFeYrU3bC0G+jjNZZpnWhuNtUS9JbK6gdDjULzkob5VVov78qzXU90z0xs8Al6sTSXf1oGA5MCrJkmSYWmxlk6sNIIBsCBZYhMMQsK9RnNSd2YSKhkoeQDIQxjAAC89NxZOY0zRLo/dOTmY5DCIFp2dIzARNDhEvKOpBBHYzizkZEZOoGNMtaInLP8OymquaWiSjGUSmEDBloGIZhoHNSUzeYG5NjV1pcJ5jg7bGYYrsgrDtY7fyVH/M8704brzcVyxlxXZU9Pz+PT4ditRwOh+onmVLK81wOeIASY10qqWytvVoqAXRPdkaK60gr86hqm5ep6vtioJeFmYiUILS+ZmioS5dKXy1TVZYwV24S3Fdg3NdAR+XWnOpyLFzSrKvWgC7lkwLyjW7oY8dRO1JqAhC3PMbMkaMZVJXdza4IKbsYIjJNZ1MNh0PZBTgOObr84dO7f/jDpw9P4RD8GCCMECSrp2wAlchXiuzmbMbMQuzMUADmarqGur0CbKVf0pzNTFPWZJpTzjmbJsWcNQnmeb7M0yWZAhAyJ3ciATMTuKRIAUCQMvIyFSBwnWsuSe/ZTU0zuYNZRMwmd2eHUAnjZTVI6W659+qeMm6FIG6lbddUqda5lt1Tq7jlgm2P22od23Z12mre7OTR2xZR9XPb5ETecuLuh60ac1+ce+99tS1e90TvjPcx/L+mlB4D0W1gz537vR2Ej9skImpy87SFmVPSerxQOPd4PP74449//OMfp2k6nV/+/Oc/v76+vr6+TtOUUtK57i6RLuKLPEOz6bLLK7he+aDn48FWv4Mii8qGUVEEvtnReIyct5DQ7of1x/YkuavWlW/aE28sfhvP4EpIvvS+5aPdvjoDaMuh90i0Y9UOk7Sx/7Yk930DvlPuzPiONKsoIZjB4aj+lkRUwrQTCUsJ7mhzzmY8zfM4RiNks2DmIJ2TffkSQuQYRQSqrplhcKibGcFInNT0cslfX16+vL6ckmpQA9lizKwKGkZuZpnMFgedss0KEgGv8bFKcLYifDoc/nrR8c2JaE3zbja3cqNXoxuH0rbZNwLwNym2xpXYhbwj8grwtkJ9SLfXgHHLI1feUb8+q6GBVhOfwDB3hhsVcyeJZzdhGjR5np6Y/uG3n//LP/7+Hz8///B0GNhd1QI7czb1xYdCs5oRsvrr+XK6zOYw83nKl8s8XfI8Z1fEOAyDPB2P4yEej2M8RgoONl3Dqufs8zTnjKSUZpumlGY95KxqE0lmmS19nfOXr6/n8xkcASYyEAAGbtbMHe/jljA6a7ObqRal7VFZKd0TAOVEqpjxa+NcPGOIFieme7SHDfnZneVle06zLR21VJe9rr67h61Uag36ij6/rnm8hGVb4r4QFUMwSIARPJtyNp1BIhnOY4whxBiEGe5aHBaZKIZgxduKAgDKpJYAxChCg4gchpEFQSAgFn86jsMQhijuXm6IljzmKWu5OF3WvMIsTCEES3N19SQw1mMQzTMhw7Vk8YYwU2RE82QOI+Y4BHKZZ8+eNUema1TVNcnUjX5aIysuqpep/bMuC8vCbzvH91QvEZXVYFluVQ/e8/kcxkhEZvby8jJNE6372dM8z/Os2QFWVebAzO6KDYm3AqWVQYW0usqdTKmlhdxvTeH2VPl4PH78+PH9+/cppZ9//rlIvXKwhmbjqutoIUW6WUXfDKFxe3Cg3PwREb2xAuHLWnoH8y0DVy5Y2qYry5U6y6kdkbATwdz5VnDHGM3sPOdpmjTn4rYrIh8O4RDD73/z6dP7Y6R5FHoexTRHZjaQsJdtXYc6u0McJTsi2bL1inJ3b50LGBstuS5V3ZXcUE8I1U1hBk/ISXHOeUqqBpcAHsnBXFy3aoI1YZJbbwSmErZGFUt4pMDl4ohndgQikAujZEoMbKwOc6d+SfOWwv3O8s1M3TDanVfd8y3BdMryLTZKO/u7rXVgbGXrtrylQjcoIrr3Ucsvu1q/ZfNvQrXl5c4E+WZp22mBIdD2bLA+WXC418IunruySxVUhHLzvC1lbw6rphMRVX99ff1v/+2/ldOS0/ml3Gde9k0XD5HiflP0oBuc1hABzIHXe+YAYozWbBFWMJjZvgel3YS2NIzb+dr9/O2cuLUJtt9+k4rutbzLpFcxu7lbtYWq0wgtBirB3KPVe23iltjuSZLdQpuVZPtwFwkd5Pc6Yi7etSXfj/naMACg+FGDiMiFGOQMAE6v03Q8HlMU9yK6OamllIbBzpc5SlBLOaUSgkHMJUSDqmpOfrnkeZ4diILJzFhAZO6qbupk5KBiZa3Oq2ZmJatcjIE5w51WP8bimXIbz/tNy7lvvm21wFZe3avZnt5s+2p9lVtoH7tw/9uVqoJ7U6Shn/q7rVM+3w6k/n78qoRkp3LzgVBdnwmA85LWMi95pdx9isoOuNs5xZR+8/75P3789L///vejTSPMU8qaSAYnT+7JfDqd3D2ZznM+X+bzZU7JDJxmfb1MLy8v83kCMA7Hw1Genw+H49MwDONhCJHcVS1nN8+WUlZDSpazpdmmuRx/mIBn9YvPryRfVb9Oecoa4pByGZgQGYoJ7zt+3eVJt4dbkY+
9fSXaWzrukvFS0xlkfiujiISui4mdeWzhaWHuNo53CaB92Hr0e7Mebj3gWpkWuqbru7pd0Skhg91s8q2+qGlOBCsXvxhiRI4IjqoeowgJu2rObsoOsAzDoNmXqDYEFpQ4pHA9jLFcx4ss43EkN9UUJMRAQyCUkK9wJmXyIBQDRyZzJ4eUzBlu+faInJ0ExKAYyJUclB1uZMpqntRIgtqcsqk5s7BEymWf2QCQA7enRvUobAk1W02cW8au9m4rqjqi2c5i7aIGmS0GCjNfLpd4Gaq1USqU5WKpQIHcr25RWzrr/q9vWxKsf9Yn1gQWw23x1Ym/2x0Zx/FwODw/P5cMgV++fGkP7ngNGGNmReHVxptFAl0vPS3Q3jAnMwcRJ35+Pn78+HEYhtNfv1TPVXfzsrlkoNuFx5a2O8yU0IIVaXVB6JZ8pf+SpqV4TcYY53lWS2XL4zxPAA6Hw9PBPzw9fXgaRxF2j0FGkSlNoeyFiBgFd0pq5ELCITAzyRLEqSwWKay5SQxeUsMTERmZKYxdy0Vcc3cjczZlh/A052mesxlYWEbnSE6E5Dnn7DKQiEAkCHMZoCmrsCz7HotxAiMCC0xLLJ/idUGHw0FnHR2jYYBnNXMqiOyIpFPVby9bQXzPomr15a8vnSag+9bery+VDn9lC+2f9XenaXY/p6bstrlVM98s7XTseoq2HRVbxNe0ZrvY3u1687AfONHVlaXWr3KmeC7EGFNK//Iv//L169etNWBmUaKZzasLukjkIIH5MB7bE0Kse1vTNJWsRXVJedWne1G17k381hRAQ5Nb4uyb/RbTPaCZbeVONaDhOzygq73n7XU7a+6LtlcYKlQtEm6tnH2W9PuLrg6fHRofjH3bxbbyN/m3wrxCfu23VT2Fjqiktl6DFJRbeJVuy7KriFol+nqeD4cLy+HIFEOk6GLmlqfkL6fLYRhcswemcRhCAItQEObZs+rs7mGIw/EQk2syJyGwG5m6l3RvkBAcAhAtx4aazAzkx2EMnMkIWBLkkpo5ys3RXyDt7+G5xVJH2C0XVG+j7pPtFOB23jtQu5bfOLO4JbNdOnxceGOfVDPmgeJ7e9kq0EUurX+ggb/kH17yrGLZPi6rxJnz4IQ5R9U/PD395x9//A+fPn4muCVJUJrNPcHc6ZI1W4n5QGm2ry+nl9dLNmTznOw0za+vr+fzmYjeHZ8+fHh+//7d4XCQeAghSBQQuSMlywozOl3UFDnnS0pp1pyzO5HQnHw2mwwnS18u6cvlMoM5DApf7kBCyg5eGWjrMdHw3Y0SbH+3ViLurI/a1rYunUTkvnP2uy7VbrregoeGqB7P7+6Mt8fO3pTtwqTUD92Ks4W74oLWY0MArnnNLV4kO9Vr0w6GgxDUNSvNGUQaPAlxECJxNxCW46MQAsHZWUSci23tDtXMYwjM7FBiHIYBRGnKarOrWwgMgqsDrlkVXK5fFX9RuLtbmmciK7F3gHrdEeUmv81MhUCoXDVV5zm7IswZ55STknEo53MDRDAtW0aOcqd8CYdQsdzMHxH5Hq+2E9CVbiO5lnLncJ7neZ4LlZS9unIrL+ccY3x+fn737l3O+fX1NeesxfOYuFz/Xb/q98lwK8VaMgVAdD0KbmnFr155hSScCO5Wj8LrgrCTZSUkA4Byta+4Vrb91p28XSPmnuDrTrDLqiznfDqdlgyBdaQ3ezA33LuVBWgrrfihZlWMEgZpwQmIKAxxjEMJkWqeI7Mw55xKXNlIrk+DqqpJiIMImRnM2dSdhdmZsyI7SkuB0pKa0sjduFBdZUC31TdgYU+dVXUJ0DrrnNRS1mygMFzO0+s0ZxCF4BQcwUmApO7lPE8k0hKBCSEEyyYCXrIJFnnK7rnMNbGLELOEwBAaKUwZjByIAyk54MrONZnPW6TYgwp1gto6dH9tZrexvDobd6vgt8TW0UBbuW5etJDUBtvf3be79keHmSqjWzgfjLRrvxvp9+Yhb3cKsWG3reX0zQl9Y9m2I8sFln1D/1c2jhVXZQWoqvOcDodyQqjn87n6/C8n7Uuxw2Fcwq+FwFzCoEvZSfdy8cBU19Q47u4pK26shPWqghcq+ubMbkfR2RzY0O1u5e/Fm98aOl37v6x0pkVRkEVGlFAZrgaQuzGus79qHcDhey7Q3bjuVfg1kN8r25bf3teVtW+f1N9EIK5BcYDFz628XeI9L2iBL2mIJGRINlgQZ7iJE4OiejqdLqp+GIKBQYwQwSGlJJByoEfC4zjGOKm/GhxMIMrZsptb2a4koFxpMYCrByA7hmEQeWUtbnnFfRTUyLHtwH996SgTDSXYbZiZFZ83d6UeN4VGqlNj/b5FhW3p4ZeVq1V5uxP9QDe1wLdCoJUV289r/YYWy5cMkF9TXjWIIiMi8sSOg9DffXr/f/z4h//jH/7w+6fRvvw0DoRg5nmGsYomvUzLtSyDny7Tl9PlcplBUdUvc/7zn/41Z5eA9++fPn/++P7D83GIIiQxEBFKtHN3d2RFzjZd8pS0BPdSNyIKIbDI+WzJcXFckp6m+TKniYSsjGJFKYjIiURqiuo9lOLWLKxj76q1H1bk1A9LzM/2yUJFJZNd69dWsPqQWjry8+ZqTAtDB39b/5690Y6ufmVmYdt6KdeIHbfjL3+WQB1lOL7cNWIBAIapO5I6ppyycZqYWYR4pCAhMDFKYAsC3KHFEhAhYgEojkcADiWnwEQwJgqjEHsMFEUkEPuymFZVtaRKDLiX5PVmRt7khFgPvpftYdPsrsRCLiA3IBvNWf/y5fTldJlTNhqIiqHugRx54msovJtS0LqkffUrDqs06SZm+y3uuMx1k1dP0rykQT+OIjKO47t37wCUFaOqLtvYZsVZtEjCrUDcNewqWbfU09JTvYSJW5aoW7y4XQq6ezG8ajKJEELxq2yZrTa+CyRw18AtUZ6KCVc243POZfG5xEPixXG8EELbDBHxeuGtJDltuRfAdRHZ+DVdV7lrGCh3OCGEEMchTbMTeEmLjRhjICai8zR/eX3910Gehw+fn54cec7pMB7hRqpuRkJsJEYxhiEGcWNmuDmzmxUH9Epo5fpiKTnnrBkmsDVfqLvBlaEOEzrN6WWaZxNIMKWUnXi5MkBEAC9ZJSgU1bMihMlRrhcuuxVkS5qJIUQJIQRzNjJiZzjDhVngrJ5hCNgt99TkPVuhJcgtN20babmvrdnd3cUt4XUt1Bmvb1sBuDsEb5ZwW/C2vTS7DVcuq+uETpfgob7o1H+dvhYP99DegrTFxrZx7Amxrv727Tc/QTPRyxC8l0UdPLjFD/YQfm/G3R1YYikRgVZX/LL2c9cqbEMI4zgy8zSlEIZxHEWkeIOYWU465cWt1Ay6cp+7jyW22a01tvy+h4g7pVMo9962A3+A7Xuf+2YnomKsGwLdxr67t8V5/eoOtLSauWjm6x7k93p5TFrbOlsabhlkyywP5FKHqMfAdLRaH27lRkGz0GJsdEzpqy4TYY7MS7YvxEFcghI5C5iyz1k9EDv4MiWQHA6HMAxgMUc291khyDnnObEcjsfD4XBZAV
gczMxAJMxBWNynYlPfis2StQwN5Msyd0/ivWmyutJ6DOGWIHdLK6ZKaVmv1qlQdQ3Wb7f9fhfYf6vSKj5q7Cvc4fQq/DvIHyBh+aplL2elxc+neBDjGlBAmcoD/TAMf3z+/L99/vG//PZ3v39+Ovic8qSBYDRZmt1NT2mm6ZJhdNKv8zxfLmmeclZSTVPKKek8ewh49+75hx8+ff7h/fF4YIG7DhGmrprVkZPNc7pc5nmeL+d8SfM0Teq5HD2oppznZIfkeEnz18t0mucMclBSY4kAAHYvJs8SKcfRJ2SvSK4/0NDbPZmwvcu3/mAsMaRvtBIzXxeoTcwYwtXC3CWDbo+j0kY7m1thUt+2XipbMmgZs1QL7R0q3PJMHXCBoGydBvbVVje38hwALGUXESZ2IrCDDESQrDQnH5INIUgMMYprmufZxGwJrJQZgdgFBNCH5+fL5ZKzEpMw1GYhjoNM53MWRHWHuJRNDNWcUJwUyIQhEuIg5CgX7oiIKXiRpc5wJkiMEVwuq5ICc9bXi3496b/8+cuXl3lWyBglxjllBgFs6yQb1stdDrQCpVgw150HL8hzv67E3Ptj2OtffrNga2Z6kf5EbEaqWnKBhhAOa8ASrM6i5eSQX15yzm40DCGEYIvjibbzWDsq51ct9bd16u/WC7REOq1v65lJJdmaG6MIppINqZSySGvDQ9czTF5D37YibIGE4HqDlhZRqsq0BMasly2LbS0ixGwG1f7DZVzXPLCdAbf0FcNYKX8ZL5mTCubiUGFmhmVXMoQwnS8hBAJpmgEcxmEYhkD8cpm+nPMgpx8+PKuMZ4OZfnr/bnr5knOCGQcQSYREskPxD2UGnIisnDV4CRqk7q5uuRw+pjRrVtUYDyKC7M4UjTUnFrDionTJepphDApSxCsHMjcncoYXP2RH4GUeyxSUfWY1JygzK0p+QjCzEBXCM0dUDpSFOAoG90E8u/0C7fng5LxSpt+6zu8q6a083SrOzrzYftIJRzRicBfI1typv98CYW3cmz2U3S7uFW9K2wXfglErfNNDoZP8bcv+tjVh/XyHWzcfdWBXHPqdcO1dm/emuwMYm0kv4aDdvfiEz/NcWPh0OsUYh2EYx+u9eiJ6eve+7D2VGDOmbmbJdA07vs5+XYeDqElmU/VxjPHR4v5haQWUbYIP3UPIN9vE7WqwQ+CWfX4B2G2DtYty3Fre7sY2eGPxPTPoe5vajn0L/N+kPJ61Vj8Lkfui92pFhQuWsAKDhFD2MZ3d5uSejJSCCxMHCWOMomrJs7qzRJLohGRqs40kQjw7VJUDHY/j++fn4ziwXsyXQwwQnAAJFALAblSyZ4Cp7BvW7dRlQkG07iniVvD+AslWSl3bVPJo5Vg3ZR0BvJGi6NbOxmp/d4zwuJHdAf6CUe8C077F7Y5MBydt0q5sv91Fi7dbIYsPI+BqZAwmcoKRO8gFYObo+O1h/I+///E//fj7H4YDzWf1FIdwzq8QOnmek5qHPJlewJCTTS9fX+c5Mwd4eDmdT6eLqj49jYfD8PHTu0+fP7x//xwiSpjTgTkhZU2aPCed5zRdzufLnLPN86yqIhyHQIxpmi6XC8nzpHqa8tfLPLkhjsRhOeq5KesVXHYiVNSaWTkOXfTjcgBBWOx6X8+ubvBJ60lg6xa3QS+vp0Xui3sIHBkNUZkZ3xqZ2wldpmmtUF3rt6q/n9mNDVAN2tpCKz/L29CBUiNGppTKAmDdTyUqdjb6NSVAzJwJvESaAcyJAGcwFOwm05xFeBhCVnfDOBxDCEMM7m45u6YYhYKA+HI5uXsIzOSAm6uaIukwBmawwDxPr7OIDMMQQhjGBWAmYljOmUHMATBTaJ7d4ShBQdiI3YRFkkl2V+A8X/7Xn778j7++zM4yPB8CEoIaogSPMmueZ2ciZyprwsWJxReXTqxJ/K6GV5ObsmK53pHoCAuAqbZ7AHV1ZGYxRqx5J7GayMw8juP5fC65knPOx+NxmqYSd1REfBUKzNcMJ5UIqofYOI62RihdZnYlgwozNuK43SGuT1rbvZBQQU5LwViXHFsadXfgmgemViAiYPFcrdqlEvEyRuFpmshRsneUVS4zp5TcjFkClRwSYmvvZfEc1sQkZsslwBseK5hcMdO6UYkIQVQ1mzLL8XiMMeacX88nVxPGOMZDHErCejLXwCz8Mvt///PLGId/+t2n4+Hwr19fovn753cMT5dU3JcFiGSZqLi8hrAEEwKsLHerK6w7JVMiOh6PnmDCcXyWNP/8+jWDlDC5/fmnn18vszEykKfZfOQg2Wq+emZmd3Ja0r0QscTBzZPm5bibaE45zjpIeDoehyHOl4u6RY5weFYij0JiJEaMXNavYNnKqXsqtpVTrVSphFHv0NY6leqo2TzbKuCWgMuH1TTvPuyIqrbQboW0NNwOyjc7yu3nLZ1XMG73iW6Waq10bsQ933LKzRg79LrfLMpbaNty0876c4v/7dyh4dA6qA5jLUqZubj3wHswdqBS870Tzlb6tWB0ZNY1WOTnPKXa1DAMpbUQQiWGRRqEUG8CF1FQ/BpUdXp5MbOSZKL0W3Zp1jtLaziZNUSNp8x8BaaIx5bSvjkjb3y7S/MtZlp+wWZ+W0g67vONdVJXoe3Day/YL52yKJgvuYjK9YeCZKxR77ZCAHtM0eFwO8BdOLccWuV5104t1SCrkwhA6EYtbrvrO7XrhmkdoK2eCymlQjarZWmmS9RxQ0mnK2VHVSQE4qWewxZnoASml/Ocpkugp+f4/PTufT7JdH6VOB4kCvnpciGid8cI5wwlcwAl6LdSmIqLBzm5aZ4lDJFlSV+llsjCyMzBnNXcvUhOTnkOIR6PT3Y6Z89xiCsmr5PYzlR1uW9ns6vWTVPHMnU6Wrndclm3rVxbaBeQW6Lq6KTMSKuPqhi/R+ZVXnVWUDfeDi0dMN2g2sotF7d8XRcGtWbRla0nVzs0Iir2HjXxEVQ1w0QEIFWFZxEhwNwPx0HnSXMWcjJjIEQ5xPE//v43f//b3/2H3/z4w+EYVUMQhieelcM5TxdTNWhSu5ieHTm98kwygnGZc07pp5++TBOen8PT0+GH33z+8cfP794fQiRmW4IWpBQjmcl0ejmfZ1UnohjCNL2yW4wSIjswTZNZPhyGkw9fX7/8fJmUGCLZPWsmiRzE3cm87NVZWeI1VnSLPVmjgrVMXerkZh470YrVNmhn+YbNDe4oGv9yufAam7BIlIUv9GrJtOQRQqhTXK3TNltBJ5C35LSGHbkh15Y76udlpV0ONsKuH2DbbkvQvoTaVyIyghc0r2lVS2QVq2sHh2fMcA6qANEUo5iD4cweaTEcywjNM1IR0LG4PwgxyIJzCDJEcdcoVFIXVCELwCwzc+AgUpILFkQoUbnvTg4uMWWcBC7JhGXMzq/n+S8/5T//dfpypuwHdSR3NTfA2YtLRgRrWfi5Gxwtm9lNhHcKXFwmyk3RTrtQc57WzU09dussHr76JV5lRF0GF8KqcQtoswqt7bTh+Kkp3gQa6rYiWmKoNFBN87aa3+4xtG87DLTme
PcJEQF3t2mJboFZ7in4+XJ+enqKMXjOUcIwDDnnl5eXQtyL7iqOzUzMoeTiJPYldiuhoq6q5KssJmrj37QjUlVhDiEMcHcys9PpVFmXec0YWa5/kDsPYXzCGH+6pP/2f//pcpr+6Q+f/vDuHdukBJsv5aKsSMzq6Xymp8OaQ1bdCsEvPqLlbl/O2ZkOh0PKWdXJl0uNBlJiNZuSfjlPl+yzwQgggUu590iuxFKc83NWg8E1SUjZMBIROZMZCG4lepS7GpxJwhCELStMk2bN5r4EuiEHo5zGOzv1G3Ob0lLaFeH3qzWUsNBqNU/bnZQt8bRzt222VQCd3Ove3iHLG/3RNrvl/ds/b9Jj1GJWUtpSbRL3vKX3RnfttOmtBXJrkC0/rB/jKlf3o5m1w9ziqu1i+eT+ILqAKm8a7ZsLr6UK3oWd6YaWyjqQmT98+FCYa57nnLPqslmmdP2WiIiJr04+UluuKWjI3PwGD7vivUfFnRvU29JWe0Dz3yxbif3L2ukFfdN+K+1bKi3u/UWmbWFoiYqZdc81y2/vnLd/fnO838RwCw/t3f7YbaSttitwti1Qc0PMlsQkFmWxUEcRjiHIUOjTzNjJ3C1nqJmZq7n7KTtpcsGcVN1AMQwRNuY5ufuUFZiiSB7jOIbAo06XYrXJcnXTI2EM8jREd4ZQQAhmxOxMLqzqRgQRdWhWMwZYJOZpLiZEiRJfB7uL3hYz3Xzdm7W2nVZTbDlld4K2k9j+Wb/qesEauwG3a8uOuh4rrMcEuf12d1AdVFs6rKh+oHp2e+8wTyIoi96KTzLAptNFmEZBAIQwRP708d3Hjx//82/ffX5+/3EQ0ZRzdjMSU/jXNGf2yXB6vUwvSU+GCWI8v/M0z6fTeZpmTRnAp0/jbz5//t3vfnx+Pr57fxhGIVKHlqs/TCWUHuIQYtZ8mlLK85zMTEQoiARSK4tYU/hLyifNs2MmZGKDOxEI2RTm9aaQkcGWCJAdbgtiu7RVtUgTQXpp6nbT4d50N8QjRJ3dK0vgqEaSdwqi7JS181Wq1cVhfVIJoyXXquBqhdp7JzzLOrD8iDGG9vSjtNI6Ed1jb3d3ukJmpkR8HVIxPkHJfTLyZNlUnSSweRgjD7jqyMAAyHyJNmmUWQBwSfpNbr54BV4HieDMxdPP5/l0vXcrwkQOYfZ5ymAnMEBOTI6scFjyER4uM//15fIv/zr/6af0Zabk45zdXNWt5O52UAAJi0lQ1dky3IlvTtjLAviK/WLP0X6QnhIMpiPEgqo6tkoftA64PQkpE1ZOAsuWagk5UxbJRbvEGAmyhFciMbPmBvuNrVZ3Slo5WyHcmi8dFXavWmrrBril2q3a6IRd+xy33qQldjARffz48XK5/PWvr5Hx6bcfyzEdMy8XCK+n+deMIwBYsGy4Uj3bDO2m2mI+3u7btSJgpTSOVJIyoPhwjuMI8xC4tO8wUnJ3PhxIwiXZeU4TY4zz+/d6jPIEgZiQHDgMwyGEwNnc6WIWQyCilIqDBLnhMk/zPIuImilIhIdxpKyn0+kwxvM8TTlNKWejDEnul4TXSS8Z2QBhR8lu7FBjZvagxaaAuUNVk6kRwOQkYMua3JA9u/uTi5b7giQOVsumOWdzKSlMjdiJEYSYWR6bvRs6xJ3w3221rRRqVWD9v13wtMTTPcTGuLz3qjbSqeq203uN7FoS1IiObalSe1fkdjW3f1572ZxYtvjBrRAArneT7rXciab2+RYD7Sf3mPqbo9sC8wvawQYJ1DjodqNw93L8nlIuB4NXSci7A+EazruVmUQUWcohfLuF342o7Zr2bEH6lm13j7bfXrby3Nf92V/WYFd2ISyYrLeg2yPEtg41Jeed+/sd6rBHkw8A63j5gepp1dbyY7Nn9IDOadmVht+6EtDthqyvSxGRMBTPNSkH1wOJYNWAaqrZTLUElC6KPauTmzgumqak74YhhkFGqDqpu5c4buUqR+QgsCAsgRhsZA7md+P4w7unv075ktRUy1ZmdsvqGjDNxq5wS9nTnF1zweB5SlkXPetMS3IrumIVt3TVDvzB7OxWaKXiI1TvKYuOtHZtmHZCq7Ts7rDca7NtAbeUuf1qy+O7An9Xiz0eflfavlq7qyx3W6XmZOpLVgnQElZf3IYxIGdSFcfTiB8/Pf/x73//+x9//KPYEMYBrnlOKS9pksm+XjIizcm+fL28/HTCzBExuJxO+XQ6pXnOObvi+Xn8+z/8/vPnj58+fxiCDEMgdiIu17rM5HSaUkrTlM5TOp/nl9fz5TJndQohhnENOZMZUR2a05fL5ZTyxUxFMkHBRmCCqVGJULKk8HIAbi7c4/abemq7l9rpjlaGtwRWH5Ywgc2fjPXaUdVKdepLWT1QiG5NCLvdIKt6trzqJOquPCwPy0l4V8fMgjeXWCoz1GRN7aiWnurJXrP3WQ7l2gEzczFCkzIr1N3dTmNmhkh0sIEdJaNgIIIbMfka4QNEcHfV5CVmkUsgymaSc5ABYDgTMa0YUdUMcvfIxedH5mRwBrGTEMicoG6mMx3nZD99vfzpL+mnF7ycw+RDZs6ejJTFLKesyk7CQYSTibuLUsZVJzj2jFS6ZpmsE9maIK2uvZLgbUqQbjuZ1hW8ry43RcYXnM/znFKapklVy0IxhMAUVjezssixHs71h99uSxRoS1TSWlqBtez4uxNVgkMNXLwl0C0htuTUKdROUq9s4CW6WiVRWXH18vIyDMPz83EQGcdxnud5vhQjbNnUIQJo9QRfcrvjCjmV+KjFU6mdqaIKCqr91jphZgbXBCQl7UQ5ZwghkEOEQhBmritINs05k0ONHf7Xi/9//vx6fjn//acjfxg/H0eGqwR1UAhDHC+XizUIrPkn1YyYKcghDsRsBHGEIeZZpzSd5jwpzEnGp8g+ajz96SUlqAEEK0FMCcSrCgc5la0Wd0I2nXNidWZycMljaUbqmjTMSeeUA7E6zGCAwl21eicKiBlBWDee+1sa2KXDSgktFXU10YgjX3cKthV2lfcOt96SZX3Y6oatnuhkKH3Lvtl2fa9+S//bXrYfvvHE6XFf3/xqq/N2cdLNYBViZmZMBHzXIqObxG9ieLcUFdaqzwJVmudq7ZUfZXFyOp3RzO8VgGWXo85Ogce6E9QbzzTsCMAqSdrhEC03zruHbxzj22tuy73Z/AXt3GuiBe5qP6hVVDIIJamCu1AT3dS93IWl+2OkO1cf79Xfyord+p3Y6dQT0U6Qs119d/2Tbh6uYFyX36VmCWI0DAF5NoOtii+vobmXMZi5GgB2EBGDlImNFJZmvcxpPsQgTCIhBJi7a4mxnnO+pBlADMLMTMIGzmCi5yi/eX73P356/XqZNXvZoVe3SXPyUGhf0zwl1zlBL46c3KYpqaqhhMGu5KQdElrNjkYa74r37qtdUdw+vzVOeuu2VGtdkd9CKtWVqW2tA2a3bDVXW2p3W05vh7PVgPfg3FJdp0HuKa86RmouTBIc7AImWCAwWXSHqzjej/j9Dx/+8Q+/+7sfP3/88HT4+ldKph5KMK2cbJpytnROmYTP
4IEl8Hl1PWVF6/4vG5CcOlLcV3FlrfQXV62p8ojXxqpCA4rPr6JrcuKfa4UggbN+Y1GAJz+kEzDRIRQDEY7ADUr4sG1Et4aKSsffVBevzUCM/Rue2aivJkIlr3jIiy/Nq0BhdW/RVsnFsWG/p3qZpa9KiL6j4DlakBAsA4jigcQ+hS7GJCxD4FhpIzl5xvYgoUc5meHl9++/UDAIkwCwgUZiHQ5E7E37JQNQu8WUFWZ5PziFiinov4aRbm+oMeYWP1LYKz7yjnnB8fv3A+dV2HiM+PT09fvupZ508/3f30088///xzSv3hNFTvDRzHUY0hIsJFJr2KUXKMkTkbTyilqC365ubu55/jzePNr+9/+/Txi96zSCkycx9T3/eqgr68vPR9f3//kGUXwgsAyHxlBgtL4YkIxDgTOmRW9Gwyru+ZEf1rl7XMuQadjdrY7CbaPc7P+1XX/kx4F7bRTWUt7O7QwhYjta4vcexL41XTxZWtgvXVNLvm2NcR7lnNJYZ/SXxcafP7J/o7G7zeS8vzLxwK2Ge3fWrlr0kWWE6ouC0NXHAJuDTLlzBxRTQ0HxqO4V80eqblZdqmZd+mxgReT7cRczOQNY1pbEXEOfkCIoJDYzy/MCumwrLIKMDMgtVGCcgsZkL01TyuvZ1QlqjZxKM9R0SGDCKIAbCgpiOSIggiCMAF1LOeMAACSJEMVIh4kiJ5mEz5Jui6HimEVAEhlDm3hLEe80sEgGmaUDPCYexSdw5VEuc0D8yZyzRlQQTB8FJALYSHws+n6TCOh9N0Oo2/PD8/HY+fX4YMIU2JWfi5lKFESF2XYugKyDDl0zRBDN2uDxAMJ2Q3RAEQgJwvK1TrXIrBaN3wzzUKn2pihOdptSHbK1pCCADiQxIpGMfjESqH1bCZKp+sNXQF3MIjotSFvu99GBXEc0gS7QVKEaed+oMK/apxU0yr1AmNMabYCRKG0Pd9jB1XfVLmD+fFo/JS7wGKsMKDOIemEEaiqPNbigzDoOIq59lubpo2l/bmcRVmi/VmwM/x1peBVU2Ke2+ZysvIFDwdbNd1QhBCVA/SUub8FTxxnTaoK/Higmok67lHre9cFDxIdkcLVvzXN3ipuB7bpd2w7LXo/Wab9vWKoPKc0TBszz2W7ImBBEue65lyQ+cNML6jTWCs2hoPl4bp4bzELdfV1pB8D4avl4aKNoG88sQ30ogoWSl4HkV+aowB6pmdF3tQ2c5aiDbroplTX8tBIDVey+KwUhOj2wv2kx3ewYqEYEVdAKC8BdyC0vpdihhTSslOzQ6HQ12M7bqTrQ2ToaKZArm6t/M/+V7W84WIFzZRWtkrXaECjSIwTVMKEon6lFIKKByQcpEUEwAdT+OnT+8/fv76MpTfPz1ViQciKMICQBIAuWUoF8rm6l6vXFspcJlifU1Y0ZV9kCWdUE35ZYSnMRuenj+PpzRNU0DKOZdp2u12b968effuXdftRPB4OI25pNQHSk/Ph8+fv8YY7+/vleZ5YpaiDiN25MWcS5FSqIDsu77r+q7rnp5ePn7+/Zd//vb23fDu3RtdOBRmd5uU0rt3737++ed//vqcUgdCdWURESIhcxYfKnYupN5ShtvrRNWgDhyZXcG5F4uXXoc1TW4dAvrK3y9i1vz50nD8Q3Dk5CFHRKR2v968eEUIXofcfvJjbyh8k/XZXnGzuzXqmijTtk/7z4iVNTfzcG5yMC+FrY7H3lpINUelnvmLnC3el5APqynYZIkeeL8oTEI1dGt7raYdX9xmeHtHoQu2gRyXfs7r7cF6jGaZm6WqEzTnkETADMyArIoTukih4tpyI9keoTdD+dUCSwOLkZf/HEJAEkZh1gR9ghhk3soCY8UUCWq0QykcdxlgEJaJRfhwkgBHHqZwfx/6LjNPORNgihwIEASr+Uvh0WguIBxDhzXEiKKMYiAi0fjZICIyFSiFJ2HGInE3Qng6TR++HD4+Hr4epq9Ph6eXw8ebFACp6wMTH2Q6jmGCLu0wdUxYCCYu45QZShAgLiwRAJEq9SBiCHHzDIyEApjiZ7ilGk7Dnks166HetnT5Fa3BaTrfheOa8yDnXPLcpk0lVIXB92j8BW2XhuyxV8lX7DqiOogGmFeMPqT5PuS8okxFxHqpr5QSY9zd7Lu0G3PJlbS6rjscDpYjpCE2Fs2CyDWJBhk9FxbmrLFOp6nkzHWkYgu7ialjz3VEAAsbmlPaz0F6DDMpRb+aAKCeQbCquyJic9p1HaOUwhqUdZpKnlhEIkV07DKEEBCISLMdmkpJdWEZf+F6mwUReeXvISvx6dkcOIbVvCV1Y2pIcKznosiBq4JwXRq+3EiLpnhVAVewNeNaDMQ1jm774tu5DqRnwQbweoy+5RXSNkBqWmsmqEEpwDdkwJUh/CfHtQbbl/UQ8OqGck1+m69s0oYuRxAAPKdZ0yYMe5q4xt7iGiFKRL0fV5BvaQU2iZfG4kftK9hhnPmh3PQ7oWD8Vm2DfmgNEV6Zjk1gLq01z9m+Zz1ukhmCcnNCOCuqoqhEZYYkUkSKujxoBC0WHIf89fHrL7++f3k57O/evvvxL59f/gMRCCLMmpUKpgJ0DbY1DaDb/10avl9KF5Xdq/xHaappbcZkJQz11vnhzcN+v395eUGBu7u7VF14AGAccwgBBHmcEEK320/T9Pvvv4vINJWbm92cHQoYA+33ewA9FgTmWH2FUCYEgFJ4mqbhOCCKQBmG41/+8hciikgnka6LKd13XXc8HktZ2OjqXz+0tVr4h0tDh7ZYsIbFhws6wBrzHr2Xam6ugkuvaDHLzBra6y/CBW7QjHeTCcsWK17jYROATYR4OeVX6PeP6Ppzv5SMXWzW/2a5Mn2bv/qu1/D4t76HhBrmsNlX83kTq7LczKzpfN2yFtv+2V+sO17aura6iX99w8N51tHcxqAZ8pq6GgrxZBnPLyAikYAQznYqZ81sD948IWrrMUaCOVC4+R8TkYWx9nOJWxsvZRYYimQBBg1oCVAA9MCRZxWUJISAEbXnkPYAmSEX5CKYWY6nUxkQxpHvbnG/4y7ALmnWeELR4KKa3UH4HGSl73uFRPWiqXBkiTFOx5FFBAoiFOEx51FKAZjk9PWUf/n49Z8fn15GxrQH6qC/Pd0gjgWHqR8KicRJsBCFUIRehuFYpowlhJD6LiGQ8KTeQUAMUERIBF2McjDOFUCtbrvdTuqRg+4qFP8aQFZEM9exmYP7NKPX7Glq7zqdTgCkFjnTBnPOfXer7VTjFWkuPq9aNDE8Z2sbz3mTRKSU0nWdhnE3jUX/TtNEjUg+67ShoSttre/7+/v7vts/H47PczDVjIjTNJ2OR4PHFDNEZBc3SX/VKGdS1TwNtDOOmUJavcsqbo1iTYICQErnrYBfXVSjjFq2iXmBxVlzNl2d5nwtQc2AunawmhaJcJrm9BI5c86ssU/1icazDiGkMOs/s1OrGjYBEc9xuj3/IiINWheXYbhtaJfiL3v+MiMERLY2UtrRpTw5a/8c+wm2yvq553fr4inKhsbuosgl3t006zcrtrjW/j8Np
14DvGbE9vybQzbxsGaSVmEdTh1WeF4P9vvL+vQdVwLye7rwmDeceCQ0CLF2/FGrTQdWIQortIvUoGhKpL7BlUpmvWjQ5JlBuRGch7O1X/E8qhmvYcmjS1k61lQ0qRbdsk8s1RUgq8fgTLe4gGTG5EpXvARJO5Bl8Tytqe8n6Mqk6wjnk0EACyojMB+hKqZ5mgDCruu6rpN8il33+OXLr7/+cnw5lFLevP3h3Y9/5dD/9//xC5EIEIageQ+YmYtAd5GuNofmaQOWxHMJRX+ouE4rZtwGnYhYEKsP8M3NjQi/efP67u52Gseu6yTPaB/HcRimENLpNPz6y/v379+/HE+73W4cxqenJ0Qket11XYxxyqOepY7jSEQh4JxfKgQiurm5Y84vLy/DMBLh3d0dIP/+++8551evXr169Wq32/3www8hpHEc379/fxx6S1UvRKgeulCM/80Ti6ry4iaSrzOx9WIxJlCv6pznwttzYHXwAasFxX/k7u4lIgF3pA4rTnKJxa0b33xir39PO1fYZrP0mg8eLV46XDrz5Qt3CC/17k8N9JX/qhW0Lg1IaxpoBKJ/6xIHboA3IhShGvdOX8T6mRBBhN1zfXFD1oDbL32nHPTeWLCi57U0bIin/rUwFgvaJpcou4HBD98DPF+hcoH0tVokCVx4ziVGgJg0tBSSHuQIAERCENaoZwWRRBAAgUldSTUkW56DWyBz0L04EjIPk5ggB5e+Q9M8mJjXE8Scs3BEiDEAoIgURgAsgDJxIYkQogiVLJA5xC6EPpcnRJQAEqIQjoihcJRyivuvz3w/5Psdvp7wfor7GGMIN92HEEIsMVACgiIZAXY3+5wziJQsKpJFZg+i/x5/DCHEuAOkLDSW8vwyHIeRAT9+/vTpSxB8HbqAJFMpQuXh63Q8Hk+n0yhy6nb9fU8Ui8jLy1cG6GPYh10pZRonjF3c9cAHQIrUpapIIKIwMnNMdrEkMzOSYAxdOJv+ueRxmLPJ77t+miYuDJQRkYALFwEolJhHDa25tvhJjZvCBRApxT6XEwDwdNYeU0oBMUQUYRZGwBociIu6REJMKRGpk4wl4pv3rHpAcBYJkMZxLCWHEIigFNELiV3XZ4WSAgBMhY/DGLteCCeWp+fDKU05Z84jUE5BELALUoIwFymZQ0dILFlKjJQIAmsQl4wTC1BAoDyJ6n3MchoHIgqJEM5BLBCD3pEUKKVe/CNSC+p8mTDnLgQUkZzn6PAhzOa7ruvUY9aijIoIFxCRlJKeOGhG+67rVMFDiESIQAgIUoZTmcqzqe4kmDoE4HF6RkSWCQru+r6PqZQyDWMS1FNkncTKZYRrhnqbYhRIFLoQM8/pNCwYj9KSKdVmcZ0JQ0RDuQIiq7WZMMY0liwgxZwcSDcQgKHeNeUm4uuZf7E49bumAtdYkIg423DWjF7DJXN7eGylYXm4vGm9WdmLUn3RDj4azcryBNhbXhK0G+XL4aTXuiUsxFV74mbc38sMe96IjStlXdME51qGydIl0r+4BhVWuG3qW0e8vIfZDN8EpEfUWqI3ndZFSiIinEUACYhCjCQiIWApgIgpzTvRcZjypCscEELERBQQYsRQ5ngzBQEYZ8JOFEvNA0E1JxsCAjOICGewNC0sIkKYCBEEJM9HcupporkEur63BGsikos8vRxVFdRFVIoUJoEYQtSkcCJCc64eFhEIncewVG222a4Z/UylQK1gWJ0PGc9pFgXmHdC2H6nFJV+QkwgBZEwkjJARAJETgiAKhJKh6zrJJY8lvtq/nJ5fvQrD9Dgc0v/t//p/L9Pwt59/fvv27evXb7qb+98/PYHk0ylDlxE6iaEL/TiOEhAu3MWCrd2YHwsAyDm2EOWiBIal1gFADJ3w6EkUttavIRMAQBIgixQATcssCARCMaRpwjLxbndbeHh+eX54uLl72L/e3Y9PJeccIu5T5MDDMIDkgPTm1dvDy/Ef//7P//j3/xCBv/745ocffhin4zAM03T48Pvp+eXp5uZmt9slovE0xJCIiAtH6pEIGGSQU/jt5eXl6fnr6fTl4fXuX//1548fPwpPzy9PAgwodw8Pu10vIgDh3bvXWXb9nob/mb88n6ZpyBBi7IahpC4SCM3eYCKgnlFimTtt1Xue2UwHbDEEcbqN5wBaXwOSe0puiLn54PkAOCamlhbPtTyXXpOKvejBto4adWjNtXyzTQvqdAAAdqt2llwABLNbevOKbB3k6a81GF6rCDViq8F2M0eXZo2W/vaujoVKBhEWsdOouihEd6F6gh8AwC4f6c8Goc1Ig09zYYPlFTO2q8gsDMJSb7jMhxN1QgHaJx5FcpbX6FPaMhMwcIHKtFX7CygEIiIBwR90I+KYi3f4AscfNkmd3dhFRO+1a0Z1H6dHv1K9oOSbCiEgRjXwKMvyvzJzPRNEEchTTeCs6xR0v6eHL5GICk/N60LAzJQiAIgFcSwiIkSuY6L5hpIUZi5ENNNxpYYiQgAlkg6dcE5IyCAMLjw0zUhlAMQzBRhRespo3GMUW0sCBRFFSUA7jQNRTxJEBGYB0FxSJYAACHMp+Ut+3kfMaRqncDgcbgL1MaWAu/0xpdR1XQgTEcXYpSQhZN2XlsKlzBnh5s3xfSgC0+k0DPnpZXh6fnl+GY7DlPp+zBkDCfM4jhNPIoIkGjfl5uaGMAKhiAzDMJUCMO/UBQrMSeGRaw4iI7WZqWmewACVx6ksz0ZV5nRkiPWxXqTmqZeatVw5rWcKxoK5+osKo4ikbt45oTNnEVFKiatvlefg3krJLsaaLcI5PGZdtDRnC1SVQMyePDcCZL3YwpumwSLf6K9UvUmNC9him6bJ7n9qsxQDAIzjaK/PC7UW604jxYuIzIlBuaFYItKxUC1Rg6gS9X1vF4GMDdm8IAkgAyIFQAQKgEVIYxVV/siSC+fT6QRVmSEMonTOknOOMQaMVKOwcjXGiojGkpk5oAAiBvVDcKzNPqA7nbGvdlNUGzGClFXGBcOYn/TVtcoNRQJWRbZsEeg4wLrgSofZbPlPlzUAlyBZ1/ect5ETzdemES8vfWUTaU1l2NozNXJxDcZ/pniQmidwQYH8c12v8YCr8+kGRc0HuKQq1GUVQsBsQZ7OJ+hV0LWbJzP7I4E5aVvQLFjuGqchY6AYYwyd3gpWzqALWURKmcZxHPM0B4+R5POaaiaM5gJPHRQqY1qjyxZj8xycodXvOGmZ3fuPlu+c2ZxzIOy6Luf8j3/84/HzL6eXL8chiMjf//73f/nb3/qUUr9jSjGG3W7XjQemkEUKlwLz3ojlvHH8PuDmf685g35PM0sdYEH2iIhU+xJVpXPOiESIekLRdZGITqfTNO26Pu12u8KD7g1EZBxHkPD8/Pzh/e8fPnxAhJ9+eve3v/3t7v72eHxRqXc4HJ6fn9+/f9Y4au/evVP54jdUwnI6HUqZRIogxEi3t/uUfhrH1799eJ/z+Pvv75+Pz69fv76/v9/tO+YI9PD18clujDNzgYVp15H0f9pzdFmM2/vZXB94Nfj/X1Suc3VYLe3mIO88BatFZ3/X
gmzNzJu31mJiXb95sikdmr48I/W8Yg1P5WMXnM/PmwcvoRaZBfRxw58vibztXlalEfd+Lrzw3dx16N5GHxKRWQBlWdM/Ofd1QYx+k3jaSV+y37NYr0jw2PNPmodrjDVS0kZt764pqqEQ/woRRa/1Ii5HgoA1t4S+X5bNefcMT77N/K3IpV0tm0TvcrkEASBi1NuMgkBOtykkIkyMgihBUHIpnMsoXFIqMp1yTsAJoQsUQ9hz3/ehK0G1wb7vo0QQ01J020F6I7wUGPOXnPPpOD0fjoeXYRizYCIKmY+ZYZhOp3HIeYIAMUa946eA5cJ5yCIiGGKM05Qt3/e8vUAUmVP2+XSZREQYAUBgPvyuTo+zZ5FV09Y0yOlZ1QGQqgeKSM5n/Uo3Q376xB3LCRQWnqbzSRvWQwWb6GYphhCYS0Oy+lUb10ihzngFSDMklobBL2mq7tS2CYsxTtOgo9YY3HVcs3uV2ZYVqlJyxW0N1ioLvIno+cWCGqtuacdR522W7UQVLV0XTPFTPbmmiJijE5WaON7eUgSqz6phleply+KKzZQOP1DkOV6ZAEDXdZFSybnkcxezOm3LSua5oaoQNhzEizHPT1XN9ot9nl8/rYS6Bn2bnhGreLDn6/VuX/3DRm7BBangm5ILalIDD6yGv/nEF3Keit+svPniujTS0bO+piNjlX4pbTJJa8TPJlzYczQ4+Z7STDE4btAIKlgiH74PabQVDGbdVAMDXDgOWDfVyE57xRvHGn6lypi1baM2BojUcG8R55ms/OHu7q7ruv1+r4njRCRzYS6SxU7HxqyhlQszBzp7NZdSGET59DQNBobPDLw+nfFjb0hF5BtRNP/XFdRbJ6iJE8bff//9MUzTifv97u2bVw8PD8q0KZZc5rO2uktALKIWWjsvW7e/ZmvzhxrPQzY2eAAAJAvf4Cvwb/bFwkjVeAIACMKi1BNjJEBmDqhX/fl0Oh2PQ0rp7n6PFMfpqA7DQ5lSSs/Ph69fvz49DX0Pr9+8evX6IQREvAEAC6E0DMM0TTmPX758ijGm1OsppKbhBYCbm5sQwuFwIJpD1Op5NxA+Pj5+/fr1+eujCpfb29uUUt8nZlYzQAhBaoDreZg4/72O7SsLvOHhckEv8p8vTeVmudT1NznPJXmx2YKJGC8i12wHnXJyaaSXhrMJ8JXRNUt7DYyv6WE2vn2pl0aCmABaQwtnbPh2ROoVhgrJGUi/F2rW1PdIWIPfT4RvZPM5uCiyxr2JiFb3LOzvprSVparmO4XV5F7kwB6BjdwXkQsY8LQHSwm+2VRT4RLR8taVRa0/b7n7vtddqUBR75pASBRzzkiaAz4gnmNdTmLwYZ14xHOY3YXRT+WrzegVpnB+chbIoEGuzoNBAJ8iSpUrUfMUIGi8EiwajDQEBCwMHWBASIhJKAh++Mq7ncRYRHIIpe+BaCqljGPNMVCNMKpTxe4p5zyNJWeeCohgiAKEzDCVPAzTOI1Emuedcs7qqjRORdUVIkp9TCllZr2+lcucHELrV/PaYo91djZw65lryAGuLoUAoFcL9Ka4R4u2qSeX3pWCl9GWoZ5/S80n4U351lpxmej98hORnEe/Qyo1aaGmvCwuoQLN8Qf0euSsLkodvNKbrUy7bKM9EtVM9EQqLIdhGMfRyJJrdBZKoHVmmZei5KyhRHFBWmf6FMOXW/ghBOYzOzB74H5/q569+orp2NM0NiZc3WB23c5mxBaezHFNcylFRb7tKTVAuVUrOte5zMfDGgeInemyLBQ5WyyNoGXQKBugLpreBis12JR1ahONiOLCi83YEGERqpd69dKJnFOBQ0M/nlrWkqapdr2CrwZLfuc/b0oID8mV4hWVhkcZHnxf3u3W11xbzDxs4PihR45VWM+grMSPFVidZDdo+eaoN4u1vO56LQibMX5Pj9ayVfYcpmm8YTtWgarl7YzAakUWmd3+G2rU+8MiojoDCWhMLxQQ4JmABRoLGrPKn/kGhF55IDpzBi2v7h/0AwDknIfhdDwOGiNKj6XMJyoghEAgrAfvrLllmebuF2cfBKDp7dmSHGxiezXSedvRoO7P0cNm+5eKHt4p3iBA13Wv7m7oYby9+6Hv+8Ph8OXTpz6l3c3NJPhyLON4yrlgCkSkIp2ZS6n+6iswGnZnHxBmbyWfoOPKaJvFe2XU2iMzq1csogjMWS2B0XI7AQgLszBhSJGen5+naRB4tdvPIcRCCDFK6kKMsd91t3cUYxQp03Qk6m1R6y0DEVER//j4yDwHG1OHlK7rQgjHQ3k5HL5+/YoIKQWREmPa3exTH3e7brfrvnz58vLy9PLytNvtdrvd/jY/PT0xM1EEohCAZrlb7PRQHDI97hpOdQVXl7jclfKnadL36xfClQZxZfFbg9EssTWTb168In3W2Ph+CdXU9LJvDXzTiO+XV7H3TPrDhkb0bYYsIuBODnjlDLwWYb6RBl2wtRKNljYX6Rryc31ZuNouALswF+vRbeK2GU4zHZvVFPRSM9t5qQ3L+fXw2PMGCR5CL/r9E3+/kQL4jvy2xHdHlrfcLEvO43mJgmonXMPnx+yhtMpcs3hfKduoxJrvdt5h6qkeGSYFBKEAIDKq14YQigRBYYgSAAKemIcpE5UUqQ8hACHLy1i6knU/j5hjLGoPRMRxVF+gEkj92hEg3sqYcxGBkPou0nGcDi+HqXDf9xRC6rrYBUpqAxyY+en5oMhR7YWBmHkYBr2Mh4hUZhOQ6hWst1BodrPUKS9cQgiqS5sRKedRFS1T22R2DZ0dCO1ERGQ+ZiaicRxNgWEXGAaXLtGi57U1UTs5z+/1gvQWrRDQ3DXFqY6lBiXSBv2dOiICIDPZzaQiWTcCXrNVfamU2dBnVkFEVJ9Yo2apG7WYElsur3hOF2F0a8bBWcCf77PJOaTvfKn1HHMIURQ5XbezKDXaEddgOdqgVtMIoopSVRqtKcNSmXMw1rtGagpGZ/oTdT4XsMg9uajrORHxyndCCqtjfYAtFxfY4Fx+CTeccYYKsdRXqEbmN16v78hsmTw3BUs+i39KqdiEU6Bl2d9k4pe6uFIuSTLrxbP1TQ62+RMs9T38g6qaH2AjV8Cd8zX92vno93cEW1i9jt71i3+iowZOj0N/VLEcddRwJgAAoPnwHH2e+zG01Gi0Ahhm3+8QAtIEIqhqJAqcL6uIMXMBNsueiMQY9/t+t9v1fW8O/MAyDMPj46N6RhjnqW8x6u16GyCS/kuAhFAClyKcGYEF3GE/zv/h1vqqqPhebe1PlD/anWhgPZCcM/X07t27v//8OlEJ8Q6Ajy/PXaJ91zPQ6eVUihAR6LEySuXMOUpqDH1+69MAYIjCLYc3X+ajMfc2rk4leHXfdRavRa86L0YqAFEzowh0kQBYSk7p5u3rV+X09fPn58LjDz++DhFUZjHrB3l4uCvl3el0Ohyej6fd/qaTsaSUiNI0TSpTYqQYd10XVdwrXZUyDUNBxNPpdDwex3F88+b+hx9+UH8ZBWy/389RCT59+vr166fDgPiV4WXKRWQ2BhJRoDBNk+UedEz
1zxwq2QrdZM5rtt+4pcC3GMglqvONeCbp+/qeBeJH6tvxOyLfeOPpsIbBHhpCzGRirW0CvIbfg+d5cgPwepjWqSfp5utyCNtqD54tfguMQbXIecnY7Cj8GNdbyvWvjWiDC8VvF9G16x27DMKcMy4smf8FPLOZsk3I/X5p0TsAOrrymDGTg59c/7rUrbJHl9HD2adv6dBrf02eeopCxHg8vaBLT4eImQuyMmh2MNn+cnHJ2PqgmhDcoJxttXpmh8jKgxBEufblxL76b/0f7bqgiMz3PICoGnYCYAEgmPfZwCiIgAFCZBlzAdQgpBkAs4ig7DgXYl0hMI5FJItICGliKYULQAjzEEIIEpImJwgsiFIYhQJBoJBCCEIy5XI6HEXmK2fm7BFCwJDijCgwH4/oNGoi0isHNgPz9KgOU+mklKmUUsqkdkvdlMCsNpzJqLqDkndhUgcSvQEozqLl82Ip6iwCmDcneja0SVUxztZFm3eLqO5XHdUIn2tGICIChfAc1EQ/TNN0Op1yzuqqqAZb/Qs1uqatAVU4mRkF9Z4PkCEEzv4MNCswRr1qclZtcI5BgiyigV4jEdi1jRAwxqjZh82AbFPptVO/3kxbtpMX/avmTf3JgsqKiF7vnScCKRDFGHU9qg4O9ZYmuyD1s2u32mBX8hRtqyRnztIsXmOank0olye7YG4yDIGZBefN0DyQIoKgJ+VN17AQNovF3nBk6/qiAnPhQNc3ZeTqf91ubVXWEF6v3yyK9a9rONd1ZLmJ8bPTuAhuwmMwiNt1eYR///DXkH/zJy8L1nBeaucSeLgUqB5vbs0uTlJFSHn4Gtu+ndlmCEregWiO0xgCxboiEEDvIwBAQNSL8+eFRqDXArXEGLsuqkk/53w6ncZxHE+TnlvZ5fwQAlZzlZ3B2XCIav5PRKCAIgRchBGBZq96EiAERE1gjwvEbpKTH7WtI7+glJlszsv3lCu0YaWUEmPEeacYb29vHx4eAkwsKYRwt9sRcIxxmEq/G+84/Mf7T/EwTKwsaO6lgXOTNtbANDhBRNhSEUkAoTUerldiM2QKAMv3dB8FQBpRAhEREELYpXhzc5PK4eXl6fHx8ea2e/XqAYA147GSx36/L2UqZTqenr8+Ur+LAXcmJogsqhzvdjvlBuN0miNOM5dSuv5ut+9E5OHh4eHhQe9oHI7PoHHIE71+89Dv0u3d/vHx8XA4vP/0jBRENxlV6yulxERQF5cYGlfm6DXHbkpzENnMl+d1fmuxnsHvoTFfGi7U8H97CCt+u9mUf8XEsW/EatopPPgIBVtBntYSaj3SSzJrLeY24d/kujoEi6+mX223tnT1tEakgb+ZNYCWJLTZurU4P29G17R2aSBbIG3IHbwQvK25/ernqCFCa2pThK1hM+TzynV/htNtrqx3v+/yPaIIryZdP6tFwfZj+quZGQ0na2psBmKv4LJ4HPrWzjvRWmYgojnwzZMx64GMG1MF7k6UTZ6OB90JyiYRbBKxfyIAjHr2iwBlPgIkFEJGhkAB5gSFpRQmpBiQoiBiSBpXWZA0dwRLSXxS6d51HSLpIa7epZymSRg5Tgp8CCGlPnRynHgcpxA4UAKg0PW7lEDdNU96h3AU4K7r+j51uAOAIlIESGazkt7eU60JKZp25G/0GfZF5lNRLrnik5k558kcMitJzbt51Si6rouxKzWVvGkm6p7KNcO7Qo7udqK5E5hWDy59CC3DOdjKr7clz1RrZk9zEJWqEUl1ScXqr2WjIKLCsz4510EUKaVMw6BRmCJWhVC7ZufxyHOozzlyDDFMJRsHnN2AVculea163SwGpUwyU576j1X1eCbCUgozMkMFb76lacZPcOIhV6NtKeV4fGlOzhQbuiGAOcxUsPVCNYosEQWKEAgRkc+m15RSAFQ/ommaqAbonN8S0Ig4UqOD2tyZ0DJrrT30fEGqudJmXH/TNIYFFqwZcQ76NK96Ae+O2zCdZsmvBcYfLY3EutLX+smlfj3YsrWraEpzQmwMt+HXDR9vBcNVfnhJHIKTJUZam4r0n0Dy2tJoPcIKq5sPr5dGkHsOs275OiQiwgwijER1PwMQKnECwfkOHoqwGQ5VTlduoDd1PfIZkQJFY3cxhZSSut7FGNVrQ/PIa0L5YRgEIwAEhNgl7TSXoqy+3nxHjemtnaCMaMNkFkLVM4dhynw2jzECYCAAkGlzT2BE4olWFjbS/7+WSpCV9+qMS8kFgIuos0iZAKjfJeDzpQMEoAAIAcu1BAN+vOAGvrlBvFIudXGJjZCgoMZdZJmDLhIAsEoc0eTGHAlA+HR46TgDyvF4enp6eni477qUcyYkAIya6FhucxmfX/Lx+PLLL//++uGvXdeFiESUurPXqEghCjEQhZ0GoqsiNd/d3TJz3/ddl0KgGAODXaqHrktdl/b73d3d7el0Cn1+en55Hp6MQs4iABDkPFhxieqbVSmXFSppNYeN+puMwlf4E/yq6XTNgZuW112sx+gfetkNK4lpv34TLbDiaV4OriXOWkCA22lsAumFoNQtpc9k1rwCS46qj30Lfk7rB9+IrNHetIkXFN21eG0E6CZOfOX1oKQec9ivCy39W0e0HiGb0+oxs6Zqz4FtLLR1NqG8zkttqVtTX7MRxGuC9HhbU+NagPrZ9GYbfRJ3ux3XOJPMbL8WEBEU4PlcspoQuW5kG3w1K+T8wcEty8/NZMyfqz3QIhCjCk8KUAoQIhNryFFEIEQERBSAwqIRclV71fyHKUSAqBlxUZiEulDGMhLERBGAczlBAYqcQgIUjBCjAEiWQiAR6eVQSpGYer0MNgxDHnmcSoj48vIyjqeU0s3NTYiz9jVMExF15/ifBDWvxjRNeeLYzfNhEy+ikAtz3c8JAMCUp3r5e9ZJNIRmCAEx2GGAqmHM3Pf7rut0No1w7UokCFDAEIJlNMKz+6gAIFEQodNptJmy2VG1VlRLmyZzhSIidXkFzZZeC1aTd6kJJ7BatEDmY4xzoDPJBrAqhDGeE47FGBGDiOj1CahnDTZArr6yooKzsOo8CoDmXEopqfuorRmjNwvop6qdpvpoqumo/Qrn6pUaarhXtRjoECa3RrwCJlXXIqLdbmcqNzgW3/fRvhIGNRMjSL1nSDFGEqiOQ3PGZ205AAJCM0BwZ2MNF3AnC+fAhrg8KfCHUqzXIusKDc7t6iyAl3IOVly7KbjSu9b7iaa+rZrmw7rZK/1eKl4ANGLpSoMNa4ZNzlY/+52BOOHhJ+6bqGv4uJX11uE7Br1RqF49b+YFlqx7jSjYwsa6XEImr1z1fPtrwSx1xyPnJL/GHAhxAZXyWHal8KSnLWcEgiARIiCSiGiMx7u7u9vbWwqojnxqDzwejy8vT/UyYTXEhcTMGQqIBCRABIJ5q02EQR1AzrtAAnW10oGUiKHrQ9ftfh++BATB2SkGIMgs2NpZaBYCLrfCIv+Vdwjhuymq8hNRX3llmAHKbn+DXBBAM/J1XUKgl8M05YEnZsGYgGICgIk3aM/PJrq9sqdVncXzWwitHfBbI1qvowWSRUSdp1BpVQ
AE1dGDhfOJgCnKOJ6+fP6IOA7TeDyO3fPTD9PY7zskQiIRiCEBAAYKHfb79PXx49enR5D+3bt3d/tXKvI0SEyoeYDnvtAceZKIdF33fHg5DqdhGi22maVzqPs6vLu7u7u76293//Pf/+P952dNHwV6dFipd0k8qp//MQZiNvD1TG3W9wTs1/sfVeyvl2ZpbG6Ur7y7XmIGp0sP8I1mbcsOSzw3PK2RCOAYXdP1pQF62WFszct6X+fCEsNGitX26wHogk7OLTTNeoRsNrguBKhpe1AApWaZ4nPAEhHRkGAIiNJeJdChsXORtf2w7pOz23V7/FzySGr4zCbym7KeBd+afjjLrFVy+VKDJp5NFA512xbRWqDu2aRiQFMDrgfld57+p2g7+5zVajTnjlPhihAAZwsMM1vc5EaumMWG6m0uLaUUqPeyjC5ts97cPYN5Ux61HeVEZc5PKwxSGEphRui6LvUdqN9jh5NexAIQUnuKRHWKkBIgpC6UQtMwIMWu6zocI2HOOQ8nRAxqWZwK5zGEECgRMCLGlBARmCVEYRimMkwveokRcc7UWC8FihACBRaZpqnGPgkMFrtyHqPycKNa1Rz6PiGiMl+RmXbV7zfEOWY389kNNaXeUO3JZRznSJ7eyqcM6xz4dGKREQC0WYVhHE8W3KWUor6KzKwqkPmXWoGq+9lqKaXMKXSnWYMlItXEdB8gbr+LNdIMAAgUM9YBwDiOMRIAH4+DSi/ddZUiiNh1nVS1UCOy9H0PtTUFI+dMgHp3QtGrqmzsksVVsyzwaqyvbmOBiDQCqtGqwZnzaNgwJdDHF9W1pzobOLZri0uqu6y+YlqWvWuMaRxHdfFFRBBk4WmaptPAzCmlSCmEEACtEQ2HAYjCogHEcVYmt50G7TYpOOuWcfCFai0iInrEYPVRL+2KkJp2Ya6JNceGmjE8+2MfAmfFg7zC2fzkuYoxu/XebqbtZRBXw4MpGI0E8h3JSu56UNcizf9qXtBW06bSXvTCxhi6/Wo9kqVLck68tPKwaJDp4bR+cbnhaPr6ZkFEC9u7iSX7YLPj0eKLuAJL0XWp6+aDH/t6XvjsWZc0YxLX+Maa+VMfqMwtpaCQ7idm1wnAnDNLTgEzi8aR0jO+m5ubrut++uFnNQAOw3A8DafTSV1DZY5dPKFAIAqkabhm9wREjBREJZQIEWIgzqXrdiIiOCcO3e/3yIcYpUtd13W3t7f3969u9ncU0//j//h/vv/9C1CIkViCEI1TJoLqpr2Yd5txW6pn/IhQ/cnLa2a2PIRXpmM9O80qQEJbkvqIhVmKXuQLhEioV8EB4Pb2dsyZBJgzQokpIkrJGYXv9jeIX1V7PB6PSZNoLZmDLyGE5snMbUAaJwWpGrgfbB0GrPkVLFeTPZnpfBIAFAAKIKD8kBACFDidTpHCPnXHl0cQ+N9/+m8//fA6HL5M+TbnMXV4PB5jJLXvlVII2Y7hHh4e+l2IkZ4+H0v+nZl//PHH29vb0+lUjxelHtrPl7V0k5Z5GqYTIvZ9tUhnjbXWhRBElIsWRIR6wKr3Ryjq4XUquR4XiujpvxKZbsV1SXkC2KQZWanQDcGYo9C6hYZt+heNqm0K2DnRGecxCNciwB9Jozu4aWbcgwGevJdOoV5zBscSjRc1Txr5JTVouXFX7yPmAfMir5HjHp/rifCuPfqrHhCwC6MQXNARPxYTuHr7qcEMzucRWlNRqruXc2t2sr82SDaCAC4oTuj2QoaHTf5WgTsDeR5XYb9+DSelFHAdeQyzy2PpWWhDnAbtJuQAUBye/evN6BSqgFiWxyXmW6eYbPg5OqdQI2l/gcjozb119sjzyId6U0k93RBR9/BRrWqN3rnghoJ2ux0RAdXSojhiEcv+Srp6hBmQ9K78OQb0EiPqa9dMDLj9KDOzJjMU9S8BYUBEjIFEijCVgjFQiiPnIoWBEQlkjn+RYQDNgwSCpe9STF1PRCmFKWeKfaQ0TyozMaKe9FAECgwIgnOmRpFRaLaL6DkugN6ExAAkarpjwCBAggVDYABhprrh8PQUQxcDhBSxujIiWkSWOXHAjBD1X7WLkcCe0xllOIWQum6OB2OUJCIg1KU07yn1aLOoN2aZxrFMWaRYmJa6EmaFsKgFj0XT+KaUAhJQEBGCmtRYkwrmUjCjgBQG0Isv9RimsEqUGQ/MYx7EaYNGqRppDWA2ReqGSXmJ+grPbsGsgQAZBco0GydZyikfRSTSOcsn1KMXoHPsY102DIAh7FKKMXbnpIW2ZFQJzHw2SM4LTXmOUak48aar0bHUupRgDiuoxMkFQggInGKq7Fh5lB4KYEizsl1K4SKMZyliDMs08xgjLuSIdbqwCoJjW14YNAzaV7OvJlD1u8xuA9UWD+cjw3nsy6bWcmUmjAtn/7K1Y1hUvnSy6CQHLOWrXHjl+k8NYN9TLrXmB8WruwdXOhKnCJkQWvdiD6835T/4Vy7NxZWynspLJ/rrwZp0/86+fDse7AYGUeMeIoUkUmbJWuZ39Xi46zrOyAwBS4GitJ1ixJ28vLzs9rv7+/u724ebm5tuN8eJ+fjh8+l0en5+PhwO0zRCpTTVcBAXLpE550xoS5UAKQAwEFEX4lDy4eV5mvLtbtf3PQkEQGD4y7vXP/30U9d1CCGlFFKXGVPESDJJEc7qDztTDsIae83uxGTu5oSea/6xGfhjxZOuisJ6mhYJWAqihEYjlfnOmtAsA9vgyX4If9SCZOTi/zZwfnNEAGDBFEAIkLkewBORVBFDAe5u929e3d/f3T686nPOOZ8AOaWYUkAMIoAQqoQBPaWM6ZYIxsOXUsrvHz6NQ37z9tV+v08p5ZynadRYZepXIlBCCCFSHrcPZaZpAqVDnO2JWmEag5ErMwjWc3xw6aMavK2cGOECda1Zyh8t199tZesF7bSBZ91CwwzXADQte26zydBgJdE2hd03y5oarwjK7xwCLA8xNx82w5zDGtcKTvosdOB6j4zWNb850ktyn5cWPKtsEFJ1XdGiQQF9IwbJZr+6QZFq1xFN/QoiCFyvDIjxCASqqIAVzje7aJbA/FXO4700F36kazRaa2vO70/NPKKa7vxClrqlNJsHzhm/p2gprTfRBzCjqTBg1QbtgMowawWWq9TPnx+D3754VSeEMJWsdRiQQUBIZwuAYoiInLkw88QlCYUQMo+CgEGzS9F8HsaUUpJpzDlHwKLjx5wLnZAAAMOsEDIyhkAhhJCJom2XoYimAA6lqA5gGdixRhvnc/oHtYF0AFB4vvBtzoSllDyxotn2+iuFcHnesHQYqPFOKITQ9zuPW0QUmbcgnhw1UGopBSFwkcLnaJa6b6kWwjlEja5rACh5nsfiIqb4iYYq9vyka+XiYsbYcx9qT0TKasEbI1DCqOf9YL6p2rNVsNt0mhFV3WVLvRdax1g9YQilHsjpu6VmElNDXKiX91TQmoXBzO6KWDqHiil6piH1YDKEcM5ob5niz4Gagh3h6HOzkNtkmTIjIiHOJtlpmoSB63HLbJUVKGr6rBZLqkpsw0GagzQ/L+KK1Vlzg
Pl5ma1y+r7MO4YaQXiZY8YggSXHF+eS4TvdLEuRg77Z9Vv2xHC4bupKL+vP3wTsyq/X313jebP+Jiu3Fvxf/5ah9/pG2Xpfw4NbgnzdUTN9CuT6PHH9uSHFS7i6juSt9oMuRhEBQaTZVUY9TcRVUx4+5jkOE1QVq+/TLvR/+ctfuq6LfUdE0zQ9fvl6OB1zzl8/P5UaDRhcNiB1+dYApzZTMUaKcZaMeSogIhIQiSHzaZ9iutmnQK9fvybAz58/I+KPP736+99/fvv27TTlaZpiikQ0THLbdylSnrhwEUGeY8+0/j+XqO7MEP74ucZ/SVG2BrPPy5xRfZrG3S7IHHlFEAGQNXNHjEQEZT53qqRyIZngFSK5QFfXTkyut9b8qomwEADQa4akJ4Wcp8I5IN3f3N7f3ex2u/HlExH0fY8kAFKKWvAgUF95WkDMiBhit9vJDz/E4/H49PT04cOHYRh++OGH27t9CEEkmWsZEVGYjQkma2wgKnWGYaClYMV6LOIVQiDRbf0lHNDSdfx7lucVltIIBbjAKwAuSqVLNRsAvsnGvznpDeO61OB6RFcqr8W0r+8Ba1i9Z5i+QiNN7KEfQtOd/drgYV3NV5h/PT9s2bjvzuuNfj/QSPM1cgAW+eKvz6Mf+KVRbBZf2coCBt+I2zUZ/F7e+cpSz7RaqHix//GU30wrrORjM8ub9MAuuyA6gMFRS9Og6S9gnl/MpZT5JncALMujRyLSKILqbrEW+X7BNP4bCy2f5lilxrZAQFgYNCZXVdn1RZiRXs38ACJICJoxJxAiARdG7aMgAAPWSyOkQQ6NaQZKTMw1DStLJsChuxER1egAaNY1BUPYCRHj7IEZAgTVm6cTIsY4298UPyGEzLNK0Fh1iUM9w6PCwCKlyFRq/vSpwAScZ49cCxJdh1CpQdM5klRtRJ/PXYPTImSOVAT+0pcw2udTOc2qjmSTBBqwVNE8m4Cg4PwWrgstHbVNt1HMoNOpsFqATQtS1cU0h0srvJQJqu1LlZ9Syul0Ymao6Shs9Wp3qiWBcwTVG5I6HUXN14K2YADOeoPBMzveuGjyZlvzOoYphNN09kawdQgukpW1YHizz1zd24jIrnHqpVCT6GM+pywDQQhERLF6FjFznqbi4usYC20YR4Nt+2AGZL+Q8Xww0ZZ5FeJiyYOnBOOAS48aXEmjTWbdVPN1NnnoJSGxyUMv1WwGeOXrpeLZN2xJ+nXXDWtuOvL1tU4zoZeaha1Z/hOlGdEa7HWnxhA2320+X5mRPwSefa4yBQECqLP0/ON8asHM4lwD9PagBupy7XCMMXap62LO49PhRZ1CT8dxHMdS9CyRQghd1yGCrU2a3XLmdcTVsyvFQKgwFESMREQSCd69/eHV3d2bN29iCKVMH379rRyf9vv9//avf9/vdyWfSp4QMJIIT1LK3e2uSziMeiDLAG3w3iuE2vy6SZb4n6GV7yjKjYVBI2Trw6pXcxBBDYUlAsghUt/3MVLO59VRSgEWoI0NN1wmpHOYlxU86D77Nq9joqkMAHpS3ZSJJcEsU2Kkm9vdfr8LKBCw7/uuixQg52maplIYhIZhQkRhTX3JMaZAsVB59eq27/eI4cuXT1++fCml/FB+ePX6Xl339YQ0hJBS9SJBYITottoKZNd1wbkdcr2qA8bzoW4pXJHFpvxMKn5z6Vnx+tc10i49+R6Ew9UJWnPdZiDW2iY33mx23Voz6ubvJg+8Iv6+k6XLUm1bV1u3vxDQK6GwJmOjDf98vam4gIpW2i67a1//ztkXaaN02rYHHYX7TpE25vd7uvMwywXlGdy9FT9YXU3rNhFRQDZQvaq2MfCrwt3+ypbWd4lU/JbDBtjgttk4aSo8ZuawjGaDYQ5DjHWLr0EagaXZ5moxZcCfSKls9sYKdKYeD5Z7S84IFBJC0/HUx6EIElGoQcmkiHJYISFQ9Qkh0DhmBgyxE4BJFUwARMisTpglxhiC5rBnkRxjqnibUwKGEIgQpACgCPq9fhE2F0GuXoi6v+/irEWoGubnuKox5/MSOC/Ic25A0gFUhTCEgPMOo93EI55zFdo0TWMxJWeaJoJZQSWiEGea0BTqlr8LSTSbAgCIzIRO1c9EYbAJcnZRLcFQUWrcVDOveSnVrBNrzVRTItIdw+3tbdd1ADwMx+YV/7VGaRMi6vuemTUBPQAIanrgxXUarDZb/TrfeKwzKMujFyNs37WfUwPGqmE98qg2ihBCGE5H5oCoOr8gAhGGQLtdb7jFqpqKyOl0MnQRkcwXAucVpwqzOGNpkI2DLriad67hekZ4m3Vm1DVnQIBEpDkS1YNU6weiS8Hsm/X+PYBdqXaFpa5R0VQAx0mv9+K/rrn8lfavNLXWcptiROgH4qXCd8K/bnbzSWNJvjQQcSJz/ZMs41LAkts3leG7J3r94rrr+jAgsmaVF83dEufooIQRaD75ms++jOarTz5O8uHD0zAMx+NxrJldapnv0OacS8n2rnpYxBj8TQwAkPGYYkhd6tOcm0KzTjzc7W738WEXSsm/f/jw6bd/9Mj/+9//su9Q8lAAuhhTTAB5Eogo97f9vkuHYylFL/Njvf644S8KywUrzV5nOYPwp+jnj5a5O3fFK4RAEhHn/MJI80QxE2JQn1LIjN+xiL5ZrqiFi69wDoK17gIv6NVn90p9joAA0zSFkBAgAKYQU4jIMp6OOE3M+TRIztMwDNM0IRJhdzwOXKQUmaYhdfTTX94+PNyXLJGo67qHhwci+vLly+Pjo5Lobt/FGEOg+Q58gcJTzpkoEkWhwMxK+yIIQH2/B9bkVQBzFisIcLYgERLKfPlcRNpN6xLzl5jVJtIuNvQdr/t21p0a8P7r9U79WbbxVVixKYPkEj9sBWKVhs26+3NIWLcAS3XXFrXHSTMc3+l6XH7tN/jx+Gw+NEgAqMHnhDwGGqGwOfymwiVPlsaT6HtQ2kyWgrUJEl6gt2Y2PTKN+P0eqcH2oqnVSO1hg+f5yRaHaSbLtgSwXIy+Ha+4rvcbfkehz3Uf66+w6tcIyAIFUBCj/83DVIS5sE9P31gYYKnEc733rBWUhqwmuTAJUGnd9I0Y/bVgjSQECJQzMwJDkRovCAMwM0LSTIUEsfYYBYNIoRgxgIjkPOac1dMjQgFhEQlCQRCBpWhquWISRJ0oZte+PCBiDgPWvBqIWDOeIzPn6nthnkjiEht4ixBUnQHnDLzzSTUi1rQHUQthDCEI2JzNWhMiptQ73axeimPW9rnA2USmjqnlPPeO6ytzmSlGXcH1rVxEVmlYbY68GU1mHWnyhkGzqumvpkya1qTavuFHRFQbtEknUmMs5FxyznbMacA0pjbTTlUh7HY90HmZLbS1GM8RZaqPKNbJMncarOqZrTT2Fu+lUmHjtVQfquCZPskp1cCwiIh936up2XMNBUZdZC1sT4xRo4z6Rd4IDGYmNsgWnM5WWfOuZw22Wo1xNDUBwK4Bg00eYhCUQFBKc1SOuAhc7sulJ2sWZk2twfbv2pMGP57rbfbesHL81ua4EahNZdzaNTag+vq2QfED
xOVmohEAHuwGeI+NdV/fWS61uTmQzZm61NTmXFzp9JtAejLYhkdIpFQlCBFBHa2Na4WQmDPWAzhmHobjMMBpHKrzPBDNYRiKBjFzoduwMuu+70sp6n6vLCKl1Pf9DZ32+/3t7c3d/qbrukAghTnnL18+fXr8WA5fOJcP79/LwD///PB//m8/jfBl1pewlOlUigjSrtvf7PsuRQII6ltJGtISNdPdNym8qbCmf7+c/hzZXC8zY3ESMISAmBA1dcPMsUsuBaKEGIJqK+ewWCggJLIikz8HLTWrvrZxfe18iznUzSvODpwkCAI559Pp8Pj0JY9DOXwSkcJTzurkH3f9TddhoHh4eXn8+jzl4fZu9+rVPd8JYui6HeIUY7fb7VJKHz58+Pr168vLy5s3bx5e3T08PKQ0XyMsRUDO54kG8AxzzVQ0azgCUrhAKeZ9pef7iDIj4YzbynGuYVuWG25bjM2G/krZkDVXN/3r+msw1l1v8mfjwL4pX/N7OKqsTlius6bN9hsRtqa3BozNX+EqD9+sb4Xc/srtGTbAsN2XiAgvSG7VcsttrgiCppf1dKw3XV4m2hYafFAfOOPzO3tsMN8g1n6yzeelmQIH2/mnrQ2MDmOzUxuy3/xosdtPm9snqRrKWUGrjTebWHH3obxoO2c/pMAIZ83N/DwRkZDcrrGdJKnXurwCbTtju8RpxXiWR/pyVhxNaJIfkGJua4ZQzdoLHVJU2FDvEDIUlJR6ioQoY8mZKBMwIwPEMgUQJIxYUES4EBRERL0HCJohIIMAaBAvmQBA3SlLKdkyDcTArPf9RW8WCAII4Hi2f3oKKDVnHRFp8BXVfELUNAMqMlPVBGIIofBUccazp00NEZlzznl2XtJ9jAaABSFzC1YACs8iNoRABFVNVS194XZYSe683qTeWOWaws7bBmeRD/PprynAlsldP5j1GGd9b3E6oNiz9hFxt9vtdt04jqfTSfGGS0bALq+OcTGLPhpqANsFwkW48rJSLQWKvRR0dGcTnxFnqOki5oeMLBzTgsd50sWzYfkc/UyjAhgf6fve7qmaUmoRXM38OKMRqBSNoIs65BgjiJRyjmZmzFfNHYIL8DwLgAvcx3MoWBasKXSMeRly3FkGSg2oKyKI7XHPZl+w4mXr4teR55WbXH7Bf92M+LE3XL6BxP8kW7L5EqieODdHuv7sG6dlZDM4r8QFe2xgbsqlA9cr8Hvq9YD5ypew3TR1HTZYosWTx39VUSap4a/I8ZamFyICIWZzBJovw+ecnw8vRBAohkAAeuAKAHA8HkONKqxNalNlTkUzR6/R1BQPDw8/3eS+7/e7XYwUkRBgmoY8QIK733759fnTKU9DPsq7O3x3v8+Hx/QAIpgnHk/HUoQoxtQjcJ9ipFnhCIDzFX68GAnGAGtX3P8CZe87SykFhKky0pyz5FFjWJJw0Nv1mZlISGKMBGfihwsU8k2yWVDa2Ui4YAXrBi+tAlhtTOHCBMQYUersIJdSNNTh7e2tQAHYI0KMsev6Lu1i3H398vz0eIwxPjw8vH5zd3//oBH+oDKEEML9PWisgcPh+enpSfMz7ff7lKJu5sw3p26tgOpWuPAs5pQ+GWZBI9Vrh5kB9CYCejnyPYjFyxaeSxyPlg5omyLJ9+KZqm1m2F1o8aKh4fMegDW3adjCJu9a89s1YzQgTWSbQnKpeLzJBY3UCyAb++bQNuE3w0zDctdrav3Et7k5asOeet1XLK1lUyvprKxX91ooNxiD5ek2LJekLHVO8Scal88LvEilVShymxepd+14GaJPnElsDfzGTmNl1jY412TQkNZaisEWfdLSHc+Beu7Ug+QXDjo/jmhecMgC4UxMbLksAqG/QnoWNAsS8fON1bxGRMM0YjUfmd5INXCIn5UKWamjaCcSiQgDQBHhzLNtJ3FXSVL3VVCEkYViKAUApQgzksSIAAgFhiMhBgpBCIS55CCCGESYkHQzP28INAu3QKmZvkspRbJCqzEJdBuMWD1shUrdNCCKKQbqSzlfdRPIIoDcdbubm5s8x+ZRJMzXyTS0UamxZ5hVVZAY4+Pjc1UkzlDpdBLGantjj1ioFicAENS0EPOBSimFJdsCW1uujMeZ9gLuviIzpzRHa2DzC6rsWw1uzWnKml/a2tabOX3fxxiPx+PhcCCikjO6MwijN+2RXAQaROz7Holmv0oRxTnqsUrlxWqIAztNqbGCDFRjBMHlh0EIEKCUEuKZoxlyRCSlBNUiqkqpgrHf9aYYm7qokHtDpXEBWh4ZKg0UmVXEEAKI8JRtvsDd/cO61mF1N8A+l2WahPVYwCnGuuAXDGbZJs4WbHeZNrQnxGt+19CYr9bUbHj3dbVnzVKpBt29/kojbDZrrpmyFx7+Ia7k8fr1RvCgu8PpxwtL1g9LJXnd+Kb883U2xap/svni9TGucQirS6pNL2tkfrPwKjqr5wO6kjQtFda7u2qhYtZIvfUgsjp+y/nQZ0bq/CLLxFnZaUoJwJ/vnE9SD4dDSunVq1dv3rxRF/fb29u7u7v+9M/ZDSEXkEIIHVGIdPf61cdf/vHyPI0jRISf//Lu5x9/4JKHYSIiFgYpXUz9fo8YDsMUKdgNLw3grSNe5zg2bMDWIoKt/cRc4X+lrqiXwIUL4bzlGoahjMcQAAEiQpwPN0WwGHNDzBqlWkT0X3T8xNPw5h2e68UIdR2Z+U8QZFNKKShIwl0Mferv7u7evn0bg+D0UhN36WV7mcZyOp3ev3//228fQkg//vjjmzdvQoCX52PhKaGEELAe3d7d3SHibte9vLx8/fL09PR0d3f39u2b+/v7WWKG5jBrHmMMcz7blBIKTDQNw+DuO9RNLc0vXh+/X3qXOPkamd+D1YaZXOFgHoA1q7/En21r6vvysske+t431pFr1tOh7XZkuem3mo2cvTKoZkT6lZb+dPbTmh962NZ4WHcnToH0lbGeDqwnXWrMBbUQIgItPcLWPSpIZ0t1Lc1En8erb7lZ9u2LeQzULniJ7dk+diGvICLKhRwqzRCarr3YFR+T4gJW/dc1uVqDtu7WkrQhmHN9SzBYNUB2zqKwJHhcHdwYMM0BDdQd+ByKQ78TC2oaH5bAgEgoiFkAIGIUEZlkAsblmpknsp5zY/W1nTXAXC1USCGSiMzRPhCx+uMZyAhAIXGdb72FAAACEJEAUZBJQCBEpBCCBBjHLCLCQkAhBIyMpWQuHYAUCSEkFJQSQiSi0zQeIRBSpKh+QZICFxCRmLqxFJTQhS6EUPIMV55e9ACOiCgFlAQayaYIIsU6KcgSQtCkDDYZiARA+lFvm+gsplCz2FHYYbAbiSDIzFMpzMNut2MAZp6moabX4kGGYTwHtPTICzR7zFINVDMTDaqeRiEAM/NYpjzxyF3XFYEikCdBMytlJgiIJAI5q4agDq6QMxOFvu+odfKeFJKu6+7v7xHDNE2ljH2/qxRyNruVUljzOAMQUCAsgpkhy8Qiu67f7XZdTKfDcDoOmkkkCmomdFiyrZIzhYQuoGjXdV3XHQ4nBOrTTgk7UCilACOIcCl
ElDDGGEoprAlS6JwI3gg7RmLmEDQ/JzPLrL7JELiTqkHNpxshpJBCCJaZUJlm6jvaE8tkpstQkzqaifJ4PB6PR0RUB6Fpmna7vf6UpwIz2dM4jbvdLhAFxGnMw3AUzhQgl1FDH2HV8EU0kHiw9T8zJp75WtSEacwwB14iABCQGIN4tZBFE1rrQp1nEM/zrmclkllD64sgASFgYZg3JectKYAIC2soYC+Sja95PqjMgeu9bb0Kq/vFEELhBbv0LLgRmeKiKsPqqJWd44eX3EUcA53pFwGAy0oXxbnZNQDnKltsHZbMs8EGLnMK2a/WIK2ce8ExejMXeL7fwLNGl5fT+lNzKGs1pd6msPq+x0b4Ne00Y9ksBI5sfJF5qwuVWSkFBmECEYRCQeMkM3DhIhQSATNnyQGFCEVKzmOAXlAEBYmYYCoMRBK61AkDDoVZLyUAIkYQit2EiMCSc0GWiGpklLf3d3/58d1ffvwxRUApN/suRjk+/ppioBxEJGICgGHIpUDX3Z1ynuLtKTzmjm9vU757eIophND3QylFIHb7HWEsTEUQqROgm7vX+PGFJ2AoE58KIQSgcV5g5hY1T42d02vOPYtOLJp7XEII82muXv24EMDT6MFPqx2ir6kRSwZgBFLTLIsIEiANPFGALgFyYZk+f/74/PnXPtFuRyFgQGFmYAkhhbAXPI2P043EUXgaEYtIiCVAYe7deZRtVK5QUZDl0pgZILvz5bMbBUAhd9PQk26zh7NfR0RAJkEURACcc8Fi13XDcIwJcj7e77sf3972YQySjzAJy/3dAyIeDqebm7uY4N/+7d9+ef9bZrjZ5fef//H+8z+6Lu52u5zH+/1Nzhkh3N3dPTy8vrm5CSGk1L99+9Nvv/3y8ePH42ECIZCw23cpdYJqaM0EEClgjCQBAFII4+kUQsDY6fVFIup36R+fyodPR4ZOqOv73WEYmctu15WcRQpI0RgKICQSRYTovN/wCJGl7mRbT+NOl5izIbOZU2tT/85JpwBgeQ4YXB5dWF4JMSCbM2jjmfaK1W9Oz+HywdNaAbCBe6E2v67uuBo9USOvKQyB4tIbE2F2Ai91n4xLqcFY1zIsCBLd3UgDg5CAF1Y4KwZ/MzoNmus2dbDWBs9dQEQMSPOZSp21Aivjzbn1qhGQuyuBTulYtI/IFWzdRIHaGBB1vKXuT85n63myLsx9mlyePD8vWuwKlRk8QghqjTC/M1NfJ86Ac7ZS3fLoV6+yzq8ICwuh9gtVgwYNGwVid/kgEM4nMqJWnDOZGTYWRgLncRbr3XgbC1Ri9lNs8x5qy6FG0NBRTzlTmFELgYScyygsl1PTuqM5ms1wy/pWLn09rxkEBAw18KPWKcvTGvXBdKxBxZtaBwMou0FUQw3P+etmitfrdkREEuql/6gmN8Vp4GIefdapmcsQMcZqFyLQcNggsbgMBEbT3nffWBtp/sD6VVM9Kq/c7XbBpR843ysrOIfoYC7MwiUzi8jLy0vOuUxTziPnmuRApHCbelU/qIXKqPnMYemMbTtLMA9PP1nzsqlhURp+GioeDYGV9Uc/fKKA1T5cmfJiX+uJ1cxKRuKlFHX+tJoxhAKL3clMuDHCnBcVjeP4kxvrwj5ATWONNZu8Z09GJABqV2CLJWgb9Hl/eb4Sg6SJ7/XmZxVOWuau6wGK+piB87ylGmUUnUV9mqaZ01WeqNZOO3Ap56wb5yWIuBAz7bqTc57AUtjGi04omsXe6HnxukplOotArpyo6cu30AIjoIqrb39dcz0EWhqucSWrrrdw6a1tfnWh8c0Twc2+NsGr3Owi5Gt91Qj+Ooo2YfBNNR+arwazffie7oy9bD7frOOXv//awnyhcw/h9xCAyj8pwMwEbMZDlnPc0TlPWwjmJhMpUiREYuY8lWEYCgwpRKIYADHRvt/d7m92XUwh7nY7ZhYJgSKCMpNwPL7sdje73Q4RNX+9svrn5+eXl5dx5Lv77scff3jz6nVIgZkLgzCWUnhixEKUBAMLZuaUUgiYj6Je2MIMYfvEYY3nZn7Xxc/UetKbX7+J7fVbyppKKVAYALqu24WYApRyAkDAOYioCDJPMgsFzjkXFGYskiGps8k1C/9m8cNxG4kNI8aVoV1aBbg0plWa1AxPGjjnLPuMb+ecVV1i5sfH50+fPh0OcHsLNzc3+5teQ92mFGKkcRwRkbl8+fLl69en3W53f39/d3fz8vJyd3engjvn/OXLl/3Y7/f7uO9E9zy2g5IsIi/joDJlGAaVek9PT+9///Dh6fbr168iknMWOYkIIvmDMw1pi4gCjLBgvNepAuuBFGyR3xrhfu3b7Fxh19bOWeytWt5s81L5Q3zVg71eLH574ykEl0VWCLG/V2THFeAbqSQyh5jz3NIU4AZUj7crsmmFpQ0aaOpcoplLpenaFuxaljXkcV0+bojCJZL9thDcKca6x29uHlyd7wJm88mV0tD8ZvtnMvOF52yndsQMAGq/gaWyoHvCeAHvG8fDVowK/VZ+E/ozX8bzIQLW/TEvGYfNrljgRESA84JBnC0hQKQp4b39V+ZT//k0RRVCO+/RHXwHYqtVOxARJL0jyyFo/A/U3ihiCCHFvYZpMQuPnYQ1A0d3wA8AKaXdbqf6aqk5G6WeVdhDYmKYrwVOOecyZhdDskxTKZPeORQRBKDQ2/Q32F6vFhHxwTZtAdiJb3BRhgwwU2ysspq/bIxUHSMRkXnhYy3VUdNm86z5l5JzxoqE4pL+zToxkYio1qQAFJ6QoiU+Mdj0LEfgHARV+9JJN6K39g1C/dX8WkVEcA4nE9zVPn1LFcIzuTIrgrWFRpEzf1rfo4ggsTZuTwzVerdQjYqmjk7THMmQmY3wtAURyVOeas6JmeDlLIbhMpdxgqc9c5Kq7631Ln/cq7Tk+c78q1PbPNsiwCYgREOfazGwCTmdncmv7eE8v2o6+mbx9fn7pEvTb4O3pgJ+Sxv0b52RXQ8vr8jF67B9s7tN7v2dL8pqH7b5fC1HrAX/9Vx/bYmt1Zq+rkNuPzHzfM6OKCIpRhFJKamrfIwUAk1S+hRyznk8wJwWL+5ixNTHfpdSCiFwLqWUFOKui33qUMrh+fn08hIj9SnEqAZKDpIfH5+7OcJojKkHgOfn548fPwzD1O/w7du3P/zww+3t7VQmIiIYIQSeU0JRSoKEIAAsd3d3Nzc3n59f9I5DLrlRkPzwPZ4v4dz2Cpfwv373e2gDceNmIxGFoJFEoe/7169f33YYSQBGtRCqQkgUkZLADvG03+/p+YSAIWJhRqSQAkxnV+pvLqIGKsf0mnuArsKF9pq+zvgBCIBqn0X9TggAOWc1r8QYd7td33WJEgGx6GkvlFJi7MYhf/z48dOn5xDg3bs3P/3lh91uFyPNp9BE+TSosHh+Onz58uXLly/H43G/31togJzHl5djLqMGMXp49zqltN/vd/0uUmTmPGpuYdkBMYrmcxqG4df37//H//j6NH0FSjFSEWER29IgnHOoOPx8Yyvc4BOX6Ys8abG729Yg9jrpXurdbzBMJBkYxkX/KG
/0jBEce2++NiD5atY1rFYoi6jzzpoaG/NOg5kGHnCs257P+40l/rwQ/87hX6pZn187BIQlf143e4nPnB9eEG0Nv9p8svniuv1GhJ1Rt9KZPdhNv7Ry5Z2rOUPuWlT5DdI8NbLtYeQfWkceNv/XKMGUfzErejX/wDJOgR+7fxgpRQ+EiIaUBW7V8YtbJVjRrm+QNdEgL3wwWF3S3OsAAFTt0Ob1MfsJASJmYSrVhwuAhMA12GCNYb4MOo6jaoCIGGsIT6vPzKVIzlmghJCIZr2lDiGISN91WB3YzKAMlcGtNyV930sVDLaZQ2dRRKfDMPN4GgVBFcJhHKc85DpDzCwWN0k38QAx9nWa/JmElGyen+ofqHgCxA1GbNqsPTQNjWrEdRupftW7Otq11xy8klNKIWJTkHDWe0UD8c05BnExZUYtwWmhWDXVXIR5ThuGVWuyWKAWy7N2pMf8wdph5/6B7lzAzLaeEgznahGEun500isJBZohbc9E1LDp1+fcsky+F/3JlEnDP9VbT3aiPI5jzjnG2Pd913Wl4tAUQqhhxBeLqK4ILycAgHl+mOp6Nw2W6hXQhm01EsgXG6Z2DPWIYSZGW5VrvsyyDhvYCCFPWrZkbHU3bzdspxFCm2AbfvyKsDUCf1AhhCX+Lwm8i69/d4XvfKsBwMBoLJBWueFgazRutr/GpH1uGl/TjyHqygRd6rfp/TrSHIvR1ABz/RBCiIgTomimisxlFJ4QOJAQcNel29ub29vbvu/3t30KEYCGYTi9HKYpS+E8nhDhdDgOpxOidJFEJKVwf3t3f5M0sXhKKXW7/X7PzF+/fv3y5fN+3//0lx/evHnTdd00jcycUmLQ8zVg5Lpdmcd4t7/Z7/cBXhAxIo11K+lR0fC3ZvibNS9Vg8vU+615WZM6CnPQA7tA+/3+1atXdz0hT7kcTSGUwqoQsnRv3vavPz2+//zEgEgpT/Otb9raR37PapKlHthwku9fjw2XQBERwhpmEVUZJgwkzAgFAKWLSTdhCMzqEFgYMSCEx8fPHz9+FIHbW3zz9tXDw0MI2HWRiArnEEKsmliMcb/fD8MwTcXO+HMeRaTrI2XIeXp6Gk7l1HXdzc3N/c39zW5v7irjmE/TKKP0fT9N0y+/vn//21cQuL29zSxQILOIkKjckQwiCICkyq6YXwjANj/xtLGJq018bsqUZifQ4P/KvDQwyFL9W7fpB+Ib9IvuUjXf6Sb84IdvvqPuFdHEU8tjUyu8clWF5eLaxM83x7he0SuGX2C5rDwm/SLyTxpU+A2D78I3tYm6zbIpntbLdu6Uts+JNumnbknaTcKaUSy6WG4StDgPuIUQ9LzQvyuu2LiIiMsGZjxF+a8iojs5JNItPsFsSpnfdMFF55R0IemL3gcVEdUTYekWJ0UT02NVCUSEC+iouTrmKuRuNOfDHjBFBRW8FqGwpMiyeUqH857SwOAa4ZRwsbXVOCqCGAFCNJxWLr28DjuvwBoGc77QCJKSGdBnLWIcKeccAnEBZbg4+yJyzhIwqc+eodLg94Rr6PEoNvdRWIb3tQkupQzDoKm9570+F64nEPUvIc6+8oFIvXyb2fWobtawzbpZL6UqMDZ3Nh1EFCmZPyE752wwHdUZ/XSMNijFtr5bSpmtfEXsyhyz5uJDXBonFVCNl4AAhEiIDAAsBYrULCDeMomISGalZLMBeoOkuLQQ3rRo2lcpxcKvLVnnHNDVVq9CGyNBjTdrF/S1grN2njEmIlM1Wlo75mvarDKb1jJnpZdQ7yUS0cSsCiGU82kWLq8CeQJT07TTUR3XWFKvvdusXP+60oaPGHzuzr3etiln8PwiXdxFdJA3vRsy0Z144eXrCrDcGfiF0AzZfvIDWeOkaXnz+Xlf+B1vrVHUQG7VYHnXZT3ABnLr6xIkl8BYj/16O7S684mOe/su1hTVtOO7s790YX69IPiekeJ8800ZC4U56UwcxpGZMxfmknOZpiGmgMKB+H6/u7+/v7m52XV913WaIQYCEwYQGYc49LGMpZRSJt7tdo/p8VMeh2GQjIWnEO5u7x9ud3R7e6tnkYfj8fn5eRzHYRiI8Meffvr73/+KeA4vTAF4TrpDRPPdB8iFy6wS9DGBBixRdpS/oTCv0dXMyBUKv4Tw76zcglGYCQlFo4ymGnoq5wwQAaRwkZKZGQlYaLe7vb3dIwln0awfIpCn0uMfDh7TwLNgVmuvswsuXvbWRlP1DZL5tBsgpJTUNBcDh5AixjIWhiIRSxEuQITH8fjrr79+/vzc9/C3v/3t7du3fZ9yHplZgKdpmCagMl9BDyHe3O7u7+9VHNhpIKDeI+JhGIZheDo9TtP0+++/f4aPN7vb169fv3n15vb2VgRLKXniPPHT8+nDh8/HI7x9u7t596/vf/94+PS5MEBAAShcRITQQqQKoq5NBjhvbGC5VA2fsFyba3uUuP2MPbE2wx8MDoS4rQCY3G+8Wq5w5usV/nRZLzFPRX7vBG692GWwhnNuQij1bra1dt6KrGpeHyOutJq1XMbFpncBdtOXpwpfxz+HCzx8BqPODGzJpktDaIBcv4VVUW84AF3QzzeLuAJLMoblTK1xbuYH63o99uazb62hn7UKZl3YyX7d+gpU/y9tx+/5fX39erYMiIhmaV+So/5tkwuzc8DblDT2RBe8urjhUl0BqiatLdRUUgbE+VKtPuH6tyqpC9w17dhuUkRyzmPJiLOHXowxhKSYyjmWUgC4cClFiAQxaJa/I2fz3dJcf1AzRjRUa7tn80j0GoKfNpvUUko+aeoKYebCzJKlTqeIoAhiUH+nEEIMgSiZ8c2bqmzUZvnRYu6F6NR+9IlKnKREDUpWi21JAeb0hry6OG591dXBpWaqqDzrzCCICLjYpNhKhkpCttLOHs917OacSTVuKlL04kdh1mwcntC1F7vjp1CpuZKZGc7XJ1z9cxgYfaJ2QiIws+KyPqSU/HQbYBTmxIPahb9d6b26jTDGcVL8dF2H1Xanc9ossQDnwxRwLMZPtz3UCPtENI0nWt6ilBoFt+FEsjrmXKzQc3eLUOC6mTg3JaC5CheKhMx+VrLenzkYvOzc5JJwuRifWVfTJ0b/sKRnBew/UxoO1oD9TZitvl+Y1wfrUbfmww1U3/P1e4pv2YjE2AhcHrJxZt+pZ+VXemzm9ApWxV1RRhQVH6C3uSQz8yxZhLvYdbv0cP/jfr97dXe/23eh3phAxMyMKCyQqIQ+hv1Onb5S6Ajl+enrMAyMgBAI466/2d9QjDHn3E1TTP3Hjx+fn5+Z+V/+5W8/vnuz3+9PpwMRxhhzGceRY5pz7cUYpe6GiEJAhAAppZRgEgChSIshN4Qhyy2IZ01rLHnMN0sb66nfpfrrWQG1wIp/hucr1hiDBj7hzGUKFEMIAQRJBAExAAYNZazOk5DLLClCyC6sq5G3/7oujVfIGi2N1L5UfHf+lTk3BgAptUoRDMBSpsycCQRj2Hd913WROGHiPuWcSx6GYfz8+fP797+fTnB/v/vhx7d9P4taAQ51RxEjgnS1O1RfFSLq+
x4RBeZDVQC4vb0tPL3mV4fD4fHxcTgMuYyPj4/TMMaYXr9+m2JPWE7jdHg5jkOhALd3r/76t5+fDi/lQykMMfV6FhsCCmcEQBBEknpPR4AtSN7molvLBZMjzVtrRRFWrGON/83SSAotnp+vodqQX6sG7fOVCpekiZe8IgK4cMLyzVZ/J2t6bp+LIFZH5OU5b0PDlwY+U/vyQM1modGT7SfzmLu+IuxXWspHG53HPyxn3+0Nvn0gpcB4/aKBfw3YFXHQzI6v5mFrNsO+cYNh3Wkjv/QDLTnVJr+yMRJR1bCkaWcN6vzEyIdnaQGANdQwsp6oChAiARJSPueuO5/LiIh6oWuAQ3DBmWKpycSFEar1pjqALTA1F8IqZRcb34uDWTJlG2HxThcrtBbPKSz/ICIKU8WvHqT5KPrNrNeHM/Z1Vz2e5hBDKIIMRBSRkAQZMKZEIQeN4RGg3hyDSvFSt8jBhYcx+uB6yc2bm6AqpaZoGTXoh5yrX6SIAFCYR6viAUWYcc6vNReNK2n2wxnIqn1JKVycESmXOSSJF/aeyg1j3oPRa1P6iiqEUg8YKvKL3mkxAhCZB0g1LY9S0Jk/0jkIrXEiRIwxegVJybTrOigsdI74YmsspcRyXpbnuxBb1tpmOLoMGqZzJnAiVQj17pAhBwBEyrwRqPXtuMX8bD2RI2JMvSnA5PIiqvXYRmTHwOisiIg1Sn7O4lRNQiwrxQ+WzNEU9VkXdVcNmzMkw2FzfqHL3FtyxI3Ou5T7Bd4wBBHRy4S4ZNwqt7i6AKDjDzZ3Bhh+axsnW/JsLTz8xPnGmym79Nbmc/iWn5Is5dD1cgnmpo5v6nuaXRd03PIK9/4mhGuAG8ay5jNGbFca2QTYin9y5RU5Xx5mjeNfSgldKqOKgDk0766jVw/3//K3nwk0RhwjCmpwDhZCIQiBAIMUkEASI4UQDy+ncTxpxI6bm5uc85CnL0+PEbvdbqdZcG5vbx8fH0Wk79OrV6/6vhe9iJgiBZ1HQQglF0RMdpNQIHYxQDhlCSH0KU1jRpIO41QuIv+bODTsbS6lS1SK37GH2ywxRlA9J5J53UekIkUYgNr643ja7brdbvcyvYgUkRJDR4XsXhtezn23Hojn6rCiPbi60q27zc8AJFJAUKCKDxFByDlHQtIYnRjKxANMGGmY8jiOLy8vz8+Pnz9/LiO8eUU//fA2URgOxxAREaYpS4wxEZdSIMSIKfYqQEthJhbBcTzVg8U5OTAghxBev9rv9/u7m5thGKYhn06np6en4/EEQA/3r0NIhCGlPnU7GXOKe73dGkIALiJFvWyIoEhBgJlPA6o7l6iTxwoPNhd+dfvD4gbbcIE+19Tlu9icl03+4/+yuwzfnDpdaaoRT146NCS0ftEqnJn/klOda9bzrjVBGswtw6z112Bo5cVpJsxX+sFR/nfKlEtPGqgAWjBWFRa/Gj2g05RgawEa6tZyX1aHX/AtUrHpWGyQHMx+vppTJN+On46GzMDtJawCBcQtnmn7Gas87/n52weg/gktV5ysrDue8LQ7A9VojHn2gLMFYliKhQE0Xq1ag2B2eJOcmYXdOg909iT0s2JdNiPRz7zEo2GnrIIHrGlX6vEJIoIFw0VUH0vzP2m0BX1RWyulMJ/32Yg4TSPUK09dOlveiSiliBhMaQaAEDDnM5y6ZVcwLCaK/apfNfScV5i1juatojkbb7bBxhilzNt9JAoRNZLqHDWUuRSwRPamZRkdlFKmaZqmSe8uzp6xq7A3DTXj2aA8Y4xqYBXO4uFH55DpF6SoKluKhuF0czfjs+s6zyGN5kK9hyn57JFIVQGGUjU6FiLqYmRixsVNWf0QYxyn4h0tYBYG4dLyQKd66TkIaZrNWhxTlhCCCs5SUy+KSM4cgoUP9sGaxWPbHkINYG8AWBelxj5WWrKbgX3qQzo7taqBVSEnQKmkLjKfDxEuwknbdE/TbGlsjgZV8bZBmXuwmVUbwvCssx7NIOL2XTtZnkT6H5o2FU4SKH8kKhfiRviKNfNteOjmBxtXe6q1Cc13Fxu4x9t6LN9sZA3/pQqXfvL04D80jXu2cKXZza7trWZa7ddG5HiqWPd1HUu4LGLnCBdgnkNbFaJ63UIbiSHmTAX0ZESkTIR40yXSEMJSkobnTpCzqF6XuhBCmjCPnIWzFGSWl6evh8NhmoYY4/39/TRNLy8vX79+pRx+/PFHAGBm1QRKkTdv3qipP+cRuKAEnjKUvOtvJp7mMFpdx8zTMCLFlIoQ5lyEMxJwFiIGCqWUGBa61CY+DeHX8XllLv6TBQU0s2IBwRQDIrAgQYyR9TYdiuodiLN/4jRNMXYpRUSccp6mifokUgD+mEthQ//2+SKFX0CSd2Vc4JbPcTvUJR4RRDSVIgUEkXw8Hj9/+gLTcdfR5+fjNE2Ho8aYhf0e//rXv/71rz93XRynU4wBEU+nMZcRKeWcb3a9iJyGA8ycafY28gpDjJECAAQVHCx5lt1diTHmiacpPz8/I4Td/m6/v727p5Q+TJNgTEpsIaKMKgVUGFV1ZW1fqg+sd89STNyIO1L0Ty7RWMOO1r9eot5LlqiG8/gtwbqpP8GTmx79V9sG2AZ9s2bDkBtm2NDb+derirGfAjMqwIrt+75WuN1e+5cs7evRXXpu/H+zfSvNRDRoMfibgfvX1zMCS33vvHYAABZXD7Tw0pusgU2WRX+ybVhDeJ6h+F9DOKf1WrQm2+lAPFb9AP0Zh0HeHOUvfqW613K3pUwHwZotHCr1RvNJM32jlMI12Ze/owWV+g0gUxX01NO7SppGwVygAmTIZWYCBDcAw3UIoQgEfWiYJRyOJySiRCnG2CWo0fw1FRyebXQFAIDmuB2azQZrPgMIlGsQS2GYU73FqEeYwzBYiDmsZqUYO66XuzSY+DRN4/j/4+3f2iXHdQVBDAApKSLWNS+Vtav27p4eu/1kP/j//xJ7uj3Tp885dcnKrMx1iwhJJOAHiBBEKVZm7eMxv/xWRigoEgRBECBAYKhc7LA4eVo+k7UopsBwCa+ir0eIGKZQkwKAJFhylzdNg0QaJUyx1/e9rQGFypLa9f1EYVgSaZTI6lP9XMqmAADiAUlEQVT2duU4BrbixKLF6EQPwxAw+iXkaVfhb5pGQ54oFytmwIULsZewp6GJFHIS06udfgvDMCCLYomINC8fohyPxwwTbcQY9YCzaZrj8QgYjOmbhU09GD2WoOwimpLec21mbttGAdPr+KXa0LaLjIvlyKBJw+yO6xdhKIFVbckRUdu2uZjsAZAoxNgw8zgOTTP5BTHzOKZhGBFRby6llIY0WL9QhACUKSlfSikAAqlaCB5IW6SVgiqzGp+xeMAaQ7EheLpV+BfadWEu66N6Yy7TNciyJVtrnLKl/xJ/hFQe+lMMWaqmsHXWWx1Fk3NJxaX7sWJD3IlaKHG3KqEhhGD+27DcO3npam+LCJwAZA9p5Q+5+dkXvyc5bnZxQ11vwIaWauvSBv0Gb3Cud1O/LW3utZfg989tAeLlxLgVJPOvAnb7t6pgXVBJBiMiYXHK
MO8v4zg2xWuRAAKWfDkhKOPK4xAIDrt9QxQDImcZ+27fXu12w3A+v5ybNlwf2qeXE2dEAALs2sgM574/n17++OOPX3/9tL/a/eMf/xiG4Xh8jjEOw5C65uHhARGPx+PT08vLyykEOB6Pp/MLS+q6hgL2/UnP8Y7H4/PpGCNFoi8p5XGcDuAhJobH4/Dxy1MaeH9oRqaU+dDtOI8V6vxsbs5LtfN6BrgmaVlKtOKOctZTXAAAmxdEFETOzCC7rk3jSXJWmaPvj1+fHhJgiDj2L+fzSxOpbVsByhLfvPlp7IcY4/k8hra9vr5+Gc7iFpfv1wCrBovukI6duzuuLFrzu7hYzvY8e9cjjyIBBEIUQkLRo2cWQSJk5mHoQ9ecz+dffnkej89NkOOYbm5umti9edPe3l7f39/v9l0I1Pd9bDT62hhjBJyksb7vJ14aWirFL1U9kdTcEjHGBOPV1RUyHo/HxHJ1dRVDe7W/ejmef//jYwwPN7dv7+/f3r55+/nL/3h6PB7uj8ZamZmCO2kV0YALGp2QKCDSOmO9zYWfFxMYqvW+yb7WXLQ6RvR8b02im/Po58sDuf665s9+jzOy8bu/h9w3KO6uPvg8mVhztuk5zOdX1Soz/i9ObUBEcXkFfX2rvGCeiNV8VevaE1LhA5PUXSF5+2AXAKBekta+r2Q49AKG5yR++AuEc72uVXz116D8rOWxVvix3FeqMAwAuMxjaSfmKg94OcGasn49dYE7+7B2TOux/csfhuIqs6s4MaOiB/tgVO3ZmlU2KU4leXXis4j0E+Zd416Ps+FYBA3VEWb5zxCndGsImig+g4hwlimvg4NS3D3FzfMbLJMBJbjoRMoCoE6vjnHnEsde3KYlajFDBLLjCtDwI8q9HN6JiFQhLLS4TGxKUUSwRJSRoi5Px3ElJg+VfAApzVFSbKmY4OtVc78SqtUCS9aARX8AgABBjVRSGAN4TUB9C3OBIaWU6g3bK6K+9+lrmFlYGWMgIkv3Z1Si1aq7asatZg7lOB0imkVu/RZOOsYUF0E14cNhNy0n5y4YAmE53mrbdtd2eg/nfD4xM8VGtTXlL+fz+Xg8xhiZ5ymw9em1CI9nrwqSK+qaSYDAoq65KIACeUygBxw8ZWgHEDXRGf9CV7ymbbwgpTSMvUanUK31fD7rr6bBUslkKF6TYTErt2ESEUlmAlOS87zQ0dgcEAjdhgF6wr08fBIX3RRWnP1SMeYOSx1MA1DpgQ4vniNOMRi+q3hiW9AVL8jPgFmDXa3E7+xUYBFK59KQv6d4IGdWduF1zwP9Enu9cd+a58Brrv46ABW/BcdRv3Ow5O4bW7+bEIrbAqym/fR60CDfshZ3smDAYwW2TquWxDnnLDmLQEBAEmFOKXEemZnH1OMpjT3ncRxzSmMkysMoJLFrm9CdTv3XP798/PT56fHlw4c39+/ejuPw+fPnEMKPP/5wdXXF4zMAPD4+fn147PuBENQK9OXLFxFBVJ8iAWAkIaJ+zDmPKBJjaGNTbhLmzAzAEVEEJOmlcchpWGPD2N082NWqqXaf9cPNmvAqAczbEII3M+iNdx4TN6GhEEJ4fn7+3//70+nhU38+DQw5QxPg5ja8ub9tGmSBc3/+9PnjaaCc834fR5mOBWMIOG6vF39ecGngrwxkc/iv19eHLMKadBK8IQLUqUQmhwsSxjGl1PPN2/sPHz7c3l4jSdOEtmtCwBhpmARZO1IMRSdsAGDatt3xYqWgogTADAA5ScbpjC8gIYamadq2u73Df//337/8+fXlOADEXbe/vr59OZ6fn5/P57PeTCyUgNM9cJgshNNVNkTDQrWWN/EsSyn/0sPXmbbHf9WdrDzRvqds9vKXNoXXmbZRXUWKl1rzCHmd1X+T926ua9hawp7BGmyFuuZ8wq+sEbd9bAOz3jX8X5s4q7DJfAA2uP/rk3UJdZd2f09Cs2Sy9HPcnNN1F9Wv6IJuVANcT7T7unGqBStMuomoJ9HK5hbsYzqYTMXFZdSDrRUiEIJmigcBAI0AoufeKmgrt8Igoi6kS6nXo9UArebD/zUherALeDxzIADnQqbmiHKHlfSSGHAWlgyIYm35mSNCIqI4mWvUJDUfORNR01llZs45AQgzEImFv0FEVbVVBbcoIObqSS5NX0UKm1xMlidAVhARcK4peoRULAM5Z1DjZF4obJ5uPPFNXlIl0IsafCBkIwWbtXlbEREX3YSZgWvPLtUYqzMkU65sQIYHdOc93mhsrEG3t5QS5ywiTdO0IapeFEIAFrNAxhhj06jZ2wdWUVBZFo6764WB5TYsOlsxOoXQFpoUiysW7jkMg521YPE1FcgaJrjSBrHY3PzJk/rzeLzlKZkhAoAGKLIwOXajcuYL7vYGIgIXgp/kZgDN9QcLRmNvrLT0+WylWqFG0vbZUxdcKHMFWVA7lqi/AEDOY3weFM6vA1yM8geOf/mHvHQBwiWH/eZz+JYgUo1ujQF0+6uH0/dVsezqwys9+s/fBNL6guV4PfCe1fg5FbcRbsJ5CZ+XRlEdKGwC70Gt+OTrjV9qEJazWQa4tI2rMgjzqVZ5DiKALJI5D2MIgUAIhFMWkRiDXgwTJo30y4xfHr/88cfnL38+HM9913Xv3r3FGH/77bdzf/zHP/7x4cf3RIQ5KvsK4QURMEDXdVdXV7GhYThnTogQAhI1IWLTNO+vb8exV4UwBr2PFkLTnId07LndH/HTw+PLwBkIcUwplIDPl+bF7yYeD5srenMWNqfj0tSs6UdEAGZ/mWHITw+PZxgonQ9d2x+HnOGHd1f/9f/2X969eRMbSlmej/2//Mvvnz9/eX4+EUUe8phHCVEIL8V4smXo6U0uq3lrqp4+/0Wxez12nrISgiYQjk1zf3+/C/JV8svD17Ztd7vd4XBAEgDOOff9gCj7Q4cIIrOZAnT7EAEhLqZc1S2HIRERIFJEPfQnIhEUyIghJ6EgIQSkBgACNU0EwjiOghQ+f3r45fff9rvrbnfIOX/58uXl5UX1bcagVzxExA2Ryt9ZpPkmnj3V+SevMFu/O29O1rplf3D5yivV8+9h+P+RYqM2RAlcpBlYYe/SFnPpV3Breb2oL+2nfi8Ad5wt1fa9pcb4OUUMr8zsmtusCeM7y+YGtLFZLIGs9r71W0Zv4m/DaZKYFbTflH8qeBBRJPuxSzHDmIy62pE3FPV1R25oUEFoU7lWCMHZe7yGspxQNEISkejhA0HUhNLFq0qFcCIxtqVhr/xOY19937PYAWChZBFRioEx2Did2XCBBax7EREWBrVeRiSZXSm8SqCVh2EwoV83J1UIG5pdPXPJF281VVHRX/VhjG0VD0Y7suggHpuwRfq4PCOB5QLLkFVGRnUZdXYh0bM7ZjB9EqDtolkw0Clgbbcz/TxnlCmECQrO2pEUZyouQWg8bJO64nyajQurSbo0PieUBwAN+2o4CWFKElgZUQ3JwzDofTkjFXUERU3UDtiPvd5zaEMMMQycM08hbQAgxqh64ziOIhtLV9xJjyHHiN6vGX1xivq12sA867R2Muc5CJP
Tsf1Xs/vpwNuuVaum2WPNYfhwOBwOhxCC4kRRlKeUFhmm05mpUJjzhU4rS2ZoK3oTWTOshRpQ1beH3jj2Cje0pnxlz1xm7JX4N3N3Csk3xLBtbRAcX6sGuAn2utp6UPVACP26ttdlyfrXgFXPeXkHY93I+t018JcqG+RrwCqGWT03CC/h4Tthu1THkLmu/J3kJMtDijVgm9xVP8GrhwsTL0JBDEiCMAiDblAo0B9PApwQAFggN10LIsMwdPvbDPD0cvry5evvv//+8pIPV91PP/3UdV2g8OnPz8fjy/39/Q8/vG0aOp/Ph7bpun0ITYzt58+fX16eIND19fXd/c0wnAGlbWNAYEkAEGOIu4NIjjRFAD6fj23bXV/dCmKf6HB1HDI8nz9JygFCxr9w98lW4noJ2Ar1FOsZ4JrCX19HiAhivyLoURdgYk4Dj2O82sebmzfv7m/l42PK57///T/9L//5f20ink4ngdQ03du3b//tt8/Pz2dpIqsXA+LQ97vQrHt8ZQF+Eyf1eP+iRRopIjAgMEgAySAIJIhZOGLAQCGE0HTX+7b5Qe7vbjLIOPYPDw+AvNu13a7d7/cx0pj6kjGLRKb8YczQNR0ApJQ0MpzwPFNEhEAibDxbBLuIOWdhAJwS2Aqj7inX14e222UJv/zysR/yzfXdbnf47c9Pp3PPrDc5sRzYkTADiADaX5WMpCSH/h48r7mxX7xrCrz0FiyZ23fSvGdBa1Bf4b3rRjbIezUE/2sF52sA63We8k28JoMAqJ5I4INdr4Ex3lj9tAbPYPMHxGt2ut4pql8dVut9UIqYVE1fJR680uOiwnKa1tVe50vTEKCmrunv6iS0gnn94ZU9sWqkPAFY+S9Ue6LhExEBtvmtf+gJGwOByPRPRzSFeZ5cJkV1TET11VJJW0R8VHmDwRgLFMl5VgiFp5CyCmipVJubxWnYm5iqUIkyn1oog1GXPN2TJ2LS1iZclmkukQkZJ0FVYxJquBHDu+ZF1EJECn4lbs7wMwM1ACbtBY0hTsULNOfM5dKXyuhdJ1C8dWl5oROXoVks2mdFMegc020+ZgSywxWiVgG7h6nqDYuyAARou8A8uSObriIiTROKNC8h2MFDxmVMES4xNlXhtztjphMSBFwa2fRFIyw/OiKCEmwtlDyBVC7By6SbLVQsc2WMMQJPrrkKxtgPGsXh0O1ijOM4nk4vp3GQ6Qrk1LIqVADAgtaRdYHuHpHNDiyPUvwsgEtc6V6ZzJtmwZsUPMhE9cLWEZ1OJ31uwUKnQ19Jds/TL+yu69SVVAo/1SkYhsEUQqUO0FjnIc7d8QYXgwVzxDVn1xKW3t2ehGDJcD3LWxd/wFFWE9oX3y9pIFr3BPG73Ec9N7SOaLl/iOPIVVlvQpvbADhmVb27yeg8J12/a5+thQrUdZv2fBOGS/hf0vBG+5ea3Wyq+vA9lTefr6mxQsgrTU1fL2iS39xuWARET3nV7yAiri9AlbuaU+hdQAG9P7zbHRAyouQ8ZpiuoA8pC+PXrw8fP358eDhnhtvb/Y8//vTu3bvj8fjx06enp4e7u5u//e1D04aUh7aLICQiu93u/fv3ItL3Jzsh6rouNqFpAgqPqWfOiNj3JwCgtokBQ8A2Nm0MbRfHxDHSbt92bZPSOIzQdiEiWZSPCuF+9j3tOcb4vZrkJv7tc9WgiHiVSjkwTtd4QoyxCbLf76+v2440BFru+3R86V9eTl1LOecQYtfFrhtRM+6GQMwEAWMcM1cHK9VqWgP5PTT8V0e9eIiISAiMgKLH5yWdVmgiMebMX79+zee4J7i/e/vSPxFRP5yG4fxyxLaNbRu7XRNj1L1l2ugRi7O9Xv2gpmliaGOMzBAjlAqcEyNJjFF3Ogoxu9izzCyMRCHnzIBE4e7u7uHhpR9yjBFCOJ/P45iYNXF10EuQgM4Vc/baXrhBVWhZW/a+ifk1+12zaFhKHVXNS+bBzab+aYJfA/xKj5tc7hW3l0p48Ix63emaeaI7ePXNWjtUQVIaNw81e3e9g2zC+Z3l9Y0Yl7nybAOteAsiaoq7NZOpMfwtYlvDI14wenW3/Z6xV7rAd8Jziats8ucKeK3mcwFov2YMgyWdYNFKrAuz0HiLhW8cAGLW/VNAUPTGNAAQYRIGJGqmvlkjIAuHJbiGCw/WGgXIE5tZZCuSYi4QkXn8iIgyu6+XWPkiQhtj1m+GGtNDVIVDNNVIE/1J4AgwxVlBgKCiP4U2NglQNdUy3mw338BpFOC4pNGWEbqnYE926PQQzxQ0jsVUoTw0bot6UxmnCuTooKLsyuxmrJNodrb0K1MHtTWJEyVVhi/vrqnthyn4zZybUYu25npc4BMnAlM1dXFqpToSIgbAlJLeFWRCgEWUGragCLgRmCvG+aFfJIY0PwuICC5zugNmAtiPK+ec8uijzrpGFp9tyTVNc3p6sWAtueTS2O/3eqvweDyqn2oJQpgcDGVN40STyJJdtgw/qAqk6ZjMYQZsk1/dvUFEu7SNONvH4FtFVroQbongALDZmmb04lejjPoBVizPg/EKkJt8f13mRmZDx3yqutlF9dDTjyyjxq0r4AXV9PuLvVixXNuoqk3UM/3NX2FFFb7ZbwK8Fhw356v6WkH1+ng3506KD0IWQVIIL96KQUT1CAXd6whCCBEJgJgZmAHlPAzH4zMjAPAwDH0fhmEYxnxze3V7e/vmzbv9fo+ITy8vnz59Sin//Pe/vX13L5J10wkQx3EElhjj3d3d8/Pjy8vL0/PD/tB1XUPU+Kg5iHh9dZVzjoS7Xds0V6AOOCFkZhCOkdo2EkwSewBMWxhYfzVWf2nG15jcpJb1fK0+l5legcHMKNB13bv7N+/v95GHhuTulp5fwvF4/Pjx0/3tVbdrApEI/Prrr8Mw7HYNx+Y0DJlzbNoYI6RUNVutowr+12npEoV/s77R8xTWGXG+OolIiAyYUhJO+yYKwsvL6fl8fH56jA0gQNvGJsQxDV+eHs/nIyAzZ7tNEGOjkdLseLTrul13aNvJcoiIKaVyH0STWiUql0qYBUCIIgIGaoD0jAMZUJiurq72h8Px9PU8DnHyUA2IhbNN46RJ5hI9iCcQAsQpx8ZlzNgqWzPDaoJeWeaeRMHNY7W7vc4o1rP/PVzrlaYq4eFStc0n3zPYS2DDFlW/3u8rr6+BqdBLK6H6FRgQ0QgSlhN3CSpD46Xte3N0691nXW1zLduHzf0IAagExvOVPalgKd/c73y1zV/9EOz5mnFVkKwb9E+yXyP2TwQL5MyM5VxAyqUkKZKzleDyhXpclRwAUv+Qcyaa7lCJU0O9i4XXi7R1cvaoaU9iYLczIaLGktGwGUYomgx1mntZIIilKISAdkLkQFrE85wbLJoh4mTKU0SMeUTEEBBAiWACnqeYooaQKX6U5sZVy5VeKtO+yN25slnc5GUGqgJgcE6IRRdexbVpdjZENYBP6GVOmsEeERGDb9neze6eWxY2i5NX2OxFrNKYwOwpSiX4ZBXlSTWcohAu1rzIrDoWOp5aKw8XgXehKCEhhDY2RDSe++
PxeD6f1XG0aRrG6ToKujuQXHw9rZ012KYYe84Fy8MLm6blUfTkpKraGpk7cR40utJiRSAiYtu2umd3XafxBm0eLYMFT/F4J4uoDsrC0qpC2GLIzphp+IGiVHvgeeWXWL4uojtiORapKnuOUDHibzLudZ1pItydWKtAS0b2PQW3XNf81zUvXsPznX29Uox0K/4u3xoOLe9kbkK47uuVr+tiFOKhMsDWG5Lh0y/PSoG8NLRqBW0Cs4YfV0qFn75qAb7SvgfsVWzoB6qfA1rCFCICSJbZFTFojP4vn39HlBAppeH5+ZwBYgsA0Mabq6vrH374sev23X7ftu0wDE+Pj58/fx6GfHd/uLu706S2IQTmRKixrzIzt217c3PT9/3T01PXdW/f3sd4RZBZpAmNLvG2bQElIrRNiJFAIKU0DOem2fVjj4j7/X5/6E5DTyAjZ3BXHmyAcCG8flX8luQ5gG9n3fK6kWWlubKf3BhjSiNx3u/3b9++fXu/p3TeNYQdHU9P4/nl4evj6eURSc79+HIaP31+HMexaXYjRIBBypHi+mDl0tJ7nTzWv2KRmf5SQcQsQHqmUCL/CQYEzCmDcAiBMKZ8Zs5XtAsBUhpEctOE+/v7u7ub0+kl8/j4+DCO4zAkxEkh7No9Ij49PzBzE7vd7tQ0nVKI5rJSjVENkiBTQPg8AiATUUqJijMO85iTNN0uyyQR9n0P9NI0qet2WYZRfV1n9mypIV+L2uJx6OfaPqwJzCq8wk82Obm5zKy7/mbxvf8lDvz9xQbl2/8mk4RV8NvXoXoFWpVMYIl/gIsEbd2tBIOF2gZus4CtWZYLKrqvsO692nRgFYTMfg0ux9gaFeupXAOwWW3CgJsyz0ZkeXawxsa6ZMtbVp9P1WnuTb6qhMbpreVFG8PVJf5WKbrWkbjQntaCiADOSplJ5irPVwPU16OIAKhn4oYPgF524hJUN4SAeVvBZRe2tZon0i4ALAej4QjCa6jXEDYiIsKZGTGIwKSx8NRLpMkSaJYc3fZNFUZcmFltSpZGJxzHTDTnN7c6PM7hRrzHgpTLdewcKW0DE6ddFzAWvMMQOI6j+s6pxdKEGe2RNZGdowYqG6ThkMsdPyoJ39XQpPM19L1JfujSstPs0rnIVB5w0lv0LVX8cJnE3KPacKuFy703C29jE6EgMc+rDouypHcIh2E4Ho/98WQR1aa0FjRPro2diAQWe0YBYOrIYOOSVcUqLNXyOZ2AI8LZNGRLfcIzNeLUTgMshKBob9vW7IEhhPure62sY1Tka9qSEILmsNaHXLx2vQHBSCWEwE6vNtKyIBPL7WFDtiurLFfjtRMjX3mJjY0yN1LtHxcEr/XzbxZ0EoaBtGZTuNI61kVWWsclhks4O9CCO7tZ14RyMvd6v56EvgnnuovXi99ajEXYYvQwbA4Ets5iq53ye8BYQy5LmWzdHS7Fx7/ai29wPm0kPUfbFmq1r6ZpUhqn5EREAHA+n8fhhJxDlKv2cHV11ey6GOn69ma32wkcmtiJiB3x5Cx93z88PF1d7f7Tf/pPd3c35/MZgK+urljy8fHUdV1ookYCUyH+eB7++OOPrmtubg8U1AIPAMiSTqfTbt+FGJn5fB44jznnlKUBPJ/PIwdAjSTegx4ww3weVH2wlbKgzyWGKwqspvv1sjlNUg6nYaZzAJU50kBE19fXNzc3RDwMAzI2ze3b3bux378cvwiPYz+eTqe+zymlGCNgGDOEEBoKjJhHoTb43r34IhXnubCu/etrvP3V4pGwXNfQdG1DCACPj48wnv7+4d3/8//xfw+R//jj93/7t39LafgQ37179+bN2xsReXi4fn5+fnh4SCk3TdM2Oz0TfP/+/cvLy9Cn0+n0/HzMOe/3+9vbe705XzYdJJoCz+QsRMTAOQsWDsAs/XlshvE8ZOEp28owDKdTv7++jolxZBAQQZjlb1ZWMR3OiwCjoFzC8xqf1TJ/pcKlaZKlskHLrMuvEyo6efrSfK0rb/70l+pXnWrNV4LKGH+GsmZxpad9T1m3YwDBFtp9ugVZKCG1Ucf2kWr4pcJr+F+P2o/OY6mSpe2venj53eo7l+orW8mMJdejb7ySf+DVGdeSlxmwHT6nxk1+g4JwXpkHTX7wrFuL6REeReBsjFLurJlgX+34ZbrZX+PSkRKRmlsqVCBiDKElEo2ICCUZqsh8zoqIdtOs2n5kEW0Slaer9qlQppSEaAqESLNSwcxAxCKQWSVjAEDknHOQyHpsQYggjMhZkuTusMs5p5z1DmHEiEKSIfFkOCq6B4hIGiYHnrZtYyQA0OztY04RJYQwpD7x2LYtAIx9RsTdbjcmFtGkf7HMTdL9LlCDiJzhfBrmBYPz2A0VnCbzXdM0eoGMmQEIEANGImHmQEHvyJ3Pg1CaTG2QCUSzy3FKsVwyEBEg1LR1OWdBDE0IxV4kLABIsRGYTKyJBTSbLWJiUbkEAERYr6kLMwpoHkhOGYrfrGRGxGGcg/FAWbpGUlgMhk49m7M+iEjxDM2n08lVyIigwfpkpL7vmxCZuR/7pm27pun78fn5eZK3EPPMCyIHAoDMgsLIYsCI7nyaFkrjw6pLKoEgMwAhJWYLm4tELCVMLgEiMmRh1ihxwpN2bRFlEREIecq6wwg45nTux66ZDmM0DYbOviquqgoqlvb7/bRMaL7Om0uxZWlgW++jJAiaYRBYWDKHECLGhpqeMzAGCUhIFEUkUADOiJMNBAB0ICLZbPtl2U5RTDXtlV5ByZkBBABDCCkNbntynEIzVUG930PR/eJ0QMPMjAAMGYAQJYRyXI2TsK4uvojTFV8lV2SYguhqsyxKkBN/L1mPJq8BmViPh1C/esHFCiyvnfgDhY2zOh2a/i8zF54uPCvvFkFEAw9Kmpb15pGX+Yvm9mGOdemxjbBUdIuhNUstN4hIFgaAQAEIFY0srE3QlMjO3WNBAMT6iX4VyeNs+TdM2gZDzuNDVn412wh0x6K8cN2f6crjxz9UsNzmKuv2/SvMnJGntSKC0/VARMQ0cAqIasURQhACDEjjeIwgp5SIImFz7tOuaw5X7d1Nd3XVXO93IaKIIGuUReIsmHtEPByaGOPpdPzy+y//+stHZPjPf//h3Zsb4NQ1IWc4Ho9d1x32gXngDAhht9s9Pobn53OgbhyycEBos7BADAGJcEypwSH3+WUsWn1mTqOIRMDx+Ng0h7uuuYrhswADMkZcomuieQQslKDzqyyBEVFdoD3+ARAxVwJoWTt0OciKzbg/AqckMvm5ZAmAiFlE/XkCRMwSsRXB49ALJUFp4BkA246wuVX/iMPVPSG8v38+n/pR6OtLP5wfx4Gp2ccQKhd9K0QIk9cD2F8AmHyJVnTiR+GpN3G9TrUYb/f0DADIgggYkQmnEyQIAJEStM1ufH6U6/TD3+93Ld0c+i/n/9fb6789vfz+8PQHABxPzc24v+6uCeOb+/dN3N1c36eUHh+fv359LMHVwpv7H5qmeXp6enx8bNs25/w//sf/DgA3NzeHw2G/319dXSnXIWqaAOfz8fHx8fn5W
b11mHkYUhNbZshZcpK+HzuZIsaLQACEzJwY2hYFE+cQAggTBsZpYVIgwTExdGHmdeBYgY92awzNL1jPpe2n9VcvN89TuDKjyVLZcPvahnHCt2OOOb4vESRa+L7pi8JkTtkVFRlt4DIYgW3chgc9El+AsRK7K+RU7fvGqSgy1fB3bTezZUDzuzLPFP8KIuZlnsYKXf7hpQoFdfUpp0G7+brfR3h1r8pesV7ytG+oRyCgXkLhegnr34A4FlON+ONyxKZp7GDdwMAQ1KVNs4Yq6lSUzTkTIJcdfwpQMe2cvijCoWujiAinOSImIjBa7gMzj4kTNnQ39KhoYiMl3yDN97DYG5Zsydi6s9e1NaLZh65aMplRGAgACRhBCBmBQSAGwUUjCTJIjuM4mqkHXJ4A0yDXKxPc+nTgTrNY6cH+Ap6fUVuiNlR7nXES/hCRUHU/V3TfzFn3MFXqjD2pvcqcUaXcqtQ6DaGkWoXTsWgImTJzi9C06nRaLSdmFpj3EjFLC5JahHTgRdYH34IU02uMsQT1nKLfQrFumXZReWB6PuWnpu97b/NcMzubAn/wZlNjs+P1PaNRNX9hCQajupAUG6n15b8qDBZpxuY6NE2wq1mIDKAusGNJ+cAAYtHPRALO78KKLVYDtFsZ+qvfbDxDsUkXEaIFg/Pk7cnDDHqqQnuS1l6urq7AUn0svFJFXJbIsAzWiiWdBpdwRLqBQTl0wGJ+7Pteg5FWcyeyIAmRjVM6X0wLRWdaeeV4kpyztye8vEoUW6gRUJCLF7Rn+lIghC0Rv5pZ/6GquVlsCAbt63i4VKoePfybFS71gqtttbyyPRavrPr22VnvrdmwTP75Pfi5VGx+AeYp+54Gxe3rFXKsrJ9X24evue7U1zEhzAMgIpqXQC/Cg9Oxp8pap6gxelIJJIUaiYiaptnv9zc3V22r2x8gIklgJluYtk7VlV0E3r8/MPPDwwOXGG6haQCgfzkS0W53iHGSOImIUx7H9PT0dDze3tweQmhzOg9DYuYQIkDmLJw160AGAAJNFdXoYGKMRJCYAwXZVNhWz6RwrvAt+qw3lAsz7xjOSlD27xSTec55F5socD6ff/vtt7H/Gui830XKDYNwliRMFLuuOXS7EOPd3d3NLWDodo/HP4/DcXwGon27T+dTBed/hNrXLXj6t69LBltFv5ANdANMfh9Eb9++/fvff4rwMvRfXl5env/81+PxdHt7dX19/e7du8PhkHPux1HVP71JHkIzDEPf98fjse/7cRzfvHlzf39/e3srIsMwHA6Hl5cXZv78+bOIHA6HruuU8FJKehHdJMXD4fDDDz+cz2cEIoqIYejT09PL8/PLMPQ8NLoKmibkgJlLYu7pev88Uh30ep1+J3pxi8nDFglt8lLD/CtdGL9Cp8nD1lbiJSVE1IUvyzwWiAhCfIG4PFrAKcZVF9s1t0a0idhqK7cPa6RVytUmotbY+J6ynh3r3RbCN6fGj6sa2iXCsNfXGxCWPM++AEzeruuRbu5imy3rEy9GbiNlq1Tkiri47Fb9ZDiswGDn88XOkbCSr6q9r6LwSrjd7N0LHtOu5EJ4WM04jqPK91XTTqJdssULlOfXhrWumreH2y8n0zrsdSIyi+LcCJJoELkJnsWJnZFm8WycXrRzbsMyIgYKdlIiRYdhnnSk+WQCEqK3INc3rKYhuOiJ1ppFgzQ0igi4EZntHhG7rhWaXA3LDYFZ/TBWZTphZWf3tJVdHEtwhG4HHP5YC51A7w2A1vvSQWVKI6H17SKlSkuaT0+WyqGO0VpGr34X4UyKwuZPEJQk/Fw4gl4M3LMVzw5847CUsysSlaL1+e3E+rLnJpGrJpxw1AEqiozO1SPUwz+NKARDix/I5nrR+gDA2Qu408HKOlopuLF4JHg6qYo/NUB3txaXjF6WHHazeNbjeHH5s0L1/CbPrpZVFx4h68ZfKexcJvwHvxiti+pJ1c43+9oc+He+skasTdkmYDYdniH754u3/roO7KcbLuD/0pNXRmespqpWEerrUFVPPBM2iiKqHZmknOLDAtsiusvG+Y4KMwNoiNFd13WIY86ZRWJoocTsNl8AhvlW225Hz89HZr4X3u/3k5f+OJ5Opwbp69evu9355vpOnQARJeUREV5eXh4eHnb7pusaxcPkvyqSs6Scc86EGAJGCkOfmqYBDCHA1dVV1z33Lwlj3NRGzJ5cccXMHGLcqL+F6lcWxbqIk0RRpQIkQL3KISDCY8KmReTz8fjx4zHg+cMPN2/e3AZuj6fz+Xweh/Hr16/DMLy7v/vpx7+FSIFCaLr9gQ/djvlxOB+DQNw6J/onyuYwRURTFsNqUa8JdcItgIvJWSK3CYS2IQFE2e/3t1fXCBBDz/kcm6vdbod6F3S/150xBtDL5NpvjFFk//T08vz8PAzDy8vL8/Pz3d2dMWc9PlaSPh6PT09HRXnbNoh4fX19fX2t+5GeJ6aUrq+vU9IACNK04XDYEcE47v48pSmMWYi8oe6+plq8XkcLr1z61y1UPGezzpoj2buyuvElrhhZrnt0skTNRWfe8i2F0EPoe5xZHwAsDxrMi51oMiuvR+1NI4vBcn0Tu8JMJSd8D54vFWtwzbcdokiWCuq6F/+58jFZ09vrZZqXFZyyVAh9Zf1QycPgRHRHBtvCwPeAh05JXjwsfYHDAxYnMqdiTFPm9ZHNmao2PnbGc08Jfr9boG5JGB6ffikZTqIzoy9S93ogrFcovsgelTgpOYjOTmqgVHeczAyoCeu8KXmqjJqEQhgmayGDZGZNzEJE7Mx1sKW42gKT4kQ7I6iYvIxiRESAERsDsuCUASxj7EL2MrL21zmqmbDj/KJTRZt77V1NRm3T5RLto+s6syuqo0hFH2baVgDMIufPGDbISGbi8HCSc4kx7BERAKmZyxQ/fUVN8NYjM+vlt3HsPf1giebi28SipOWc+2G2cfk7flxcC7xZz3MQdKqjHexVNdesCpe7RbUAAGDKebU6dDTC9lMZQoA468z+J7W0281J85qG4oatIJnXqEkDfkaIyBjKNFgWAMiQZ+Xcx3lioVUQF9PA4XKpeNB6mVvxBzqbhLTdvgAYGllYE70IWKYZ9QmHFV+zFuxwpFpZlxQ2dJvB+sPr0PqyZqmv9PXNdjYW5oUGjcKrt/zJKNg5BS6eryfx+8sm6wC3EF5v1nftidmPyLdQtfZNclqzAv8VAOxOsmtBDPkGnp1QCLNVtIMbmiJY5JwTkggjRpq4EZp7vABA13XX19cjZ3Xne//+/Y8//qgXLgQx5zwcTx8/fnx+PiKE/X7f933OOWe4vb1ChOfnx+ZPOhxaQgmBANRjHREZUTPCSIxNEwhYdrudSDNIur6+3ncPD89PhAt5cl44Zb78PggXiO0S5qcnr66UappERAjRYodb4E0ACijCKQ+nUw5Ab+7aH3/88e8/f4CBhpRjjEPi//bf/tv/9r/9Nx7T3d3d2/s3DALIbdvev7m9ez59eTmCZIHtaL3/weJ5jqx8SmGLJb7aLxNFyCk21DUBkIVl13ax6Rq50o2S
[… base64-encoded PNG data for this cell's embedded image output omitted; the string ends with the standard PNG terminator "…SUVORK5CYII=\n" …]
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 2,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from PIL import Image\n",
+    "Image.open('./mhp_extension/demo/demo.jpg')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png":
[… base64-encoded PNG data for this cell's embedded image output omitted …]
lYOcVyP+QxemXnztQv9yAOZQXrssVyP2IDemXn7tQv9yCOXTpYCcVyP2EDemXn7tQv9yCOXTtY8cVyP2ADcuXnDtUP9xyOXTxY0cVyP14TcmXnDtU39xgaXD1YwcVyP10TemXnLtU39xgaXD5YscVyP1wTeuXnTtUX9xBaXL5XscVyP1sbgmXnbtUX9xBaEKzIYrmfrBG9snO36pN7Bk0IFsEiWH7uWH1yz6AJwSJYBKsAd60W6RXB+nT1YEUWy/1oq3DnapFeEaxPV+8VwSqAXjWhV5+uHqzAYrmfbBn0qg3B+ogKlvupehAsO3rVhGB9unywPoKa5X6ohdCrJgTrE736iCmW+5lWQq9a0KtPBOsLwXIiWC0I1hd69YVeORGsBgTrC8H6QrCcCFYLevWFXn0hWE4EqwHB+kKvvtArJ3rVgF59oVc3BMuJYDUgWF8I1hd6ZUWwjtGrL/TqhmA5EawGBOsLwbqhV04EqwG5+kSwbgiWE8FqQK++EKwbguVEsBrQqy/06oZgGRGsFvTqE8H6RrB8CFYLgvWFYN0QLCOC1YJefaJX3yiWEcFqQa8+EaxvBMuIYLUgWB8E65ckWBRrDMFqQa8+EawbTbAo1iCC1YBefRCsHwTLil41IFif6NUnUa8o1iiC1YBefRCsG4LlRrCOEaxPBOtD+V/Z4H6SVRGsBvTqg2B98N+JVQHBakGwPuaL5b7/ecpgUaxBBKsBvfogWPxFFCUQrBb0iv+dUNwrgjWGXrUgWB+X/0csdbAo1hCC1YReXT1Y8l4RrCEEqwnBuvr/TqgPFsUaQa/a0CvBP2It3KyAXlGsEQSrEcG68l+qGtIrijWAYDUiWPK/t36dbgX1imD1I1gtpmt1gmBF1Oq3WbW7FdUritWPXrW4fLDianVXrariekWxuhGsBopeLRys+FrVblZksEhWJ4LVQBKsVZOVlauyzYrtFcXqQ68aiIK1YrFSc1WyWdG9IlldCNYxVa/WK1Z+rsolK6FXNKsDvTp21WB5clUrWUm9olmtCNYhXa+WKpYvV4WSldgrmtWEXh26YrC8tapSrORc0awG9OqIsleLFMsdqxv3FCy9IloH6NURbbDqF8vdqT/WMdhqRbN20asD4l4VL5a7UU9sczDn6sb29JXRqwNXCpa7Txssc3CX6o7l+SsjWPvkvapbLHebtuXPwd2oF/kjKIxe7QroVc1gubu0I3UO7ji9kzqEyujVnoheVSxWYn1G/kwWd5WOpA2irtBeLR+smF6VC5a8SnvxGfxj8dw1apUwisII1o6gXhUrljZIx+UZ/oOR3BXqFDuMygjWe2G9KtQsaYxauzP1hwO48zMkaBbVEax3QnNVpFnCEPVEZ/oAJXd4ZohHsQKC9UZ8r9zJ0vRnqDeSQwTcwREQTWIZBGtTSq6cyRKUZ6Y1soMmuFOjMv0tLIVevUqrla1Yil7MdUZ30hB3ZMQkH8UaCNaT1FpZkqVoxXRmlGf1cuclhO77qI1g3cuvVXqyNKWYj4z0sA7usMSRfiZlEaw/plxlNkuTiSnaW+l7fHdTYqk/lpII1jdnrbKSpWnEJPHddDy+Oyjx9J9MPQTrk7tW3wKfcPQ/EyP3d0fi8w64Y5Ij5NsphV59lOnVp5gHFMVBQn1TTQNwhyRNzPdj91cTglUpV9+0zycKg4r8xo4n4K5IJu23U8RdTq4eLHeb3lE9nyYJklN+DtPeW0Ow3A3Jpfpy6njoyaWD5a7SvvnnExZBdNS/h77oT9zgDki6+e+mkOegXLlX7iAdm3o8UQ3+qf+ZKOAW98bgzofB1GdTyWZRLhosd4tajT6fqgWPPdCellEsdzwsRj+aUt4W5YLBcleoy8DzqTrwkgPxcfHBcqfDZOCbKWanKBcLlrs/A3ofUZWB1xyIj9Pd6ZtJuMPh0vvFVLNflAsFy52eQV3PKIvARg3Ex0UXy90Nn/7VqOO4KFcJlrs7E9ofUpaArRiIjwsOlrsaRmMLUkFLUGKCVaxY7uTManxMWQE2YyA+Tnm/r6NwR8NpfE+sWnty+l65ayPR8qCy/d9ugfg46Q0/j8LdDK/JhbFoL8q5e+UOjczxo+rWf7sE6vOkt/x4rrsYZtNbk60vKcHBcmbMXRmpg2fVLf+bDsgPDAuWOxh2it3J05uU2GC9/X8I5w6M3t7T6nb/TQUighVULHcv7CT7k6Q/KQHF2j07Zw7uuIR4/7i6zd9ugOoScbf9d6g7F36qJYo3kJSeP9V/eMvlI7jTEuXN4woXf6MAwmsE3vfPme5aVKBcpUBjTXEEKzhZ7qwE2nxe5d4/77/2KoE3/nOmOxYVKJcpynhU5MVqPDpsFO6oRNp6XuXav+y/+jJxd3470t2KGuQ7pTYVFXWx2k+OGIU7KcFeH1i69U/rH3CZuFv/OtKdihoiFktoMipjJ7Sd3Xkf89xBCff8wNqlf9z+kOuE3fvnie5SFCFfK6XpqAwf0nJ2/61MceckweMDa3f+cftjLhR38/Tql3SppBRRmTjn8OyhmxnmjkmK+wcWr/z98j8KOVp46O1kdyfK0K2Ulqgqk2ftHT16N0PcKclx98Dqjb/b/UcxRytP/f/clShEtFBawqwIzntz8sT9dHOXJMvfE4s3/s7TaGPOlp5Kr+4J1klM2xXNkRsHT91QH3dH8vw8sXbhHzyONuhs7bHuRpQyu0xi8q7ojp36d7ymhuLOSJ7vB9bu+6PH0QadLT3VnYhapjZJTdOVzTrIz1XcVBt3RTLdnli678/uRxt1tvRQdyKKGV8kNUVTdtqgPVd2W4fcDUn19cTSdX9xP9uos6WHugtRzOAaqc3n5DgNymOV97XLnZBkX48s3fcXf7MNO1p5pjsQ5YxskdpsShrDoDxWe2dvuQNiodz3DT+zXeNkdx/K6d4hucmOdHRBeKr61t5wt8NCue8bfma7xsnuPpTTuUFqcw3pzoLs1IB72+BOh4dy3zd8zzbuZOXR7jwU1LNAalP9GKuC6NC5Y1qr5U6HiXDht9xmG3Yw/4AVq7syMhNL31IF+VVl9/7R+M9Z7nC4KDd+w224YQcTrFgjqVGYWvnjJgRcWHr3DRNyd8NGufEbbtMNO5hgBRvKzbTpjR8PguTIyBv85u6GjXLjN9ymG3YwwQo21JtJ0+s+FYTRE7V/jdjRjNzZ8FFu/IbbeKPO5V8bjTZcnWHzyz7Zg8ETxX/v4cGU3Nnwka78q9t4o87lH7DCDYdnkGDXJ3sweKD6L2rdH5M7Gz7Kld9wG+8S57rTUNNMfPopNn06B2MHyv+i1r1bdFfDSLrzr27zXeJcdxpqmslPN8Wez+dg7DyClUO6869u8w06V3ukOw1FzSWoi2LNFTkYOU78LzYc3KK7Gk7arX92m2/Qudoj3WUoaq5BPSRbrujB/IHBN+huhpV26599TzjmXOmJ7jBUNd2hVpIl1wRh+rzgG3Q3w0q69S++JxxzrvREdxjKmk9RE82Oa4Iwf17sDbqbYSXd+hffEw45V3uiuwtlSXJ0RLPgoh6UCdabO3Qnw0u79s9+ZhxxrvZEd
xfK0hRpn2i/NT0Yvp+AB9q8O3cyvLRr/+xnxhHHao90d6EuTZP2qNZb04PxG9I/0ebduZNhpt37Jz8zjjhWeqK7CoWpsvSear01PRi/o4An2ro5dzHMpHv/4mfIAadKT3RXoTBVlt6SbbcoCMP3FPFMr7fmDoabdO9f/Ew54FTpie4qFKYrk7ANgUEYv6uIh3q9M3cw7KSL/+xnygGnKg90R6E0XZtUZRBS3lbIU73cmbsXdsrFf/Ez5YBDlSe6m1CaLk6qMigJ7yvkqV5uzN0LO+Xiv/iZcsChyhPdTShNFydNF8R0dxbzWM/35e6FnXLxX/xMOeBQ4YHuJNQmzJOgCnK6Wwt6rqf7cvfCT7j5L36mrD9TeaI7CcUpCzUbhQCymwt6sKfbcufCT7n6L37GLD9SeCDB2qdM1FwSomhuL+rJHm7KXYsClKv/4mfO8hN1B9KrA+JMBa31FMnthT3Z/U25a1GAcPVf/cxZfqLuQIJ1QB6qkK2eo7jBsEe7vyd3LQoQrv6G7zmrzxMGy92D8uSliljqWYJbjHu2u5PdtahAt/sb1H35UB9IsA7pY6Xf6VmCm4x7uL+D3a0oQbf7W8R9+XlzsgPdNVhAQK7KBWs7WaMHhN2buxUlyHZ/E8Fa3iV69d9msgb/eNituVtRgmz3N2n78vtJqA4kWA2u0avZYoU+Hr36I9v9bbWD5W7BEi4crI47jX0+evVLtftvhARLdR7BanGRXk0WK/wB3aEoQ7b9mwjW+i7Sq6l/uSH+Cd2dKEO2/ZsI1vquEqyZf8QiWGlk27+tcrDcJVjEVXo1UyyClUa1/W8oCyPuFcFqdJVeTRSLYKWRrf82grW+y/RqvFgEK41s/bfpgyU67B/BanWhYI3+21gEK41u/zcVDpa7A8u4UK9G/xGLYKWR7f82ZWQIlseFejVaLIKVRrb/2+TB0hz1yd2BdVw9WMc3TrDy6AqwhWCdwIV69e6/5b3nT4XclzsTdegKsOVDWCyCZXKlYI0Vi2Dl0RVgy89LlJ2lOOjGnYF1XCpYQ/91fgQrj64AW37fouqo+XO+uSuwkEv1aqhYCQ/q7kQZsgJsUgdr/pgf7gqs5FrBGvg/vBOsPLoEbCJYJ3CtXg0Ui2Dl0SVgE8E6gasFq7tYBCuPLgGbfl+j5iDFLd24I7CSq/Wqu1gEK48uAZsI1hlcrVdTxYq5IXcnytAlYJMqWKLs/XE3YCnXC1bnf0gn4VndoahCl4BtBOsELtirvn+7IeFp3aGoQpeAbUWD5U7AWq4YrK5iZTytuxRFyBLwhiZY6l4RrD4X7FVXsQhWGl0D3iBYJ3DJYHUUi2Cl0TXgDUVrPhSHPHAXYDGX7NV/Hf+fhRnP605FEboIbCNYJ3DVYDUXi2Cl0UXgjfnY/H4KsnsiWJ0u2qvmYhGsNLoIvEGwTuCywWosVsoDu1NRgy4C7xCs9V22V43FIlhpdBF4RxYs2R0RrF4XDlZ3scJuxJ2KGoQVeGO2Nr9fgu6WCFanC/eqrVgEK4uwAm8QrBO4crBaikWwsggr8AbBOoEr96qlWAQri7AC74iCpbshetXt2sE6LhbByiLMwDuaYOnuh2D1u3avjotFsLIoO/AGwVofwdp/vpSHdreiBGUH3iBYJ3DtXh0VK+ep3a0oQdmBNyTB0t3OP4I14OrB2i8WwcojLcGmyeIQrAqu3qvdYiU9tjsVNUhLsIlgnQDBel+stMd2t6IEaQk2KYIlvJ1/BGvE5Xv1rliJz+1uRQnaFGwpFyz37i+JYDUGi/+dMJY0BZsI1hkQLP8/YrlbUYI0BZs+poo1mbst7t1fEr36r7FYgdd3x6ICaQo2EaxTIFgEqwJpCjZVC5Z78xdFsP5rK1bg5d2xqECZgjemikWwiiBYnwiWmzIFb8wHS3o77s1fFMG6sT65uxYFSFuw7edNjv9h5d24F39ZBOuGYHkpY7BtJlgTqXvDvffLIljfCJaVMgbbCNYpEKwfBMtJGYNtv69y+M8Kb8a99usiWD8IlpMwBm8QrFMgWL8IlpEwBm9MBGu8dG+4t35hBOuP7cndtShAF4O3CNYpEKw/BMtGF4O3hoM1Xro33Du/NIJ1j2CZyGLw3u+7HP2Dsjtx7/zSCNYDguUhi8GOwfAMh+4d98qvjWA9IlgWqhjsIlgnQLCeOJ7cnQs/VQx2zQVLdhvulV8bwXpGsAxkNdhFsNZHsF4QrHyyGuwqESz3xi+OYL3Kf3J3L+xUNdg3Up7fT0B1E+6NXxzBekWw0qlqcIBgLY9gbch/cHcw3FQ1ODCQHnWvCNYcgrXB8ODuYpjJcrCvQLDcC786grWFYCVT5eDAeLBkt+Be+NURrE35D+5OhpesB/sI1vII1iaClUvWgwP99SFYtRCsbfkP7m6GlawHB0aDpbsD98KvjmC9QbAy6YKwj2CtjmC9k/7g7mg46YKw76O7P+JeEaxZBOsNgpVIWIRd3cFS/wMWwZpFsN4gWImERdj180I7/4DwDtz7vjyC9U76g7urYSQswi6CtTyC9Q7ByiMswq7fN9r1B5R34N735RGstwhWGmUS9vQGi3/AKodgvUWw0giTsItgLY9gvUewsgiTsKszWH11a+Fe9/URrPeyH9ydDR9hEnYRrPURrPcIVhJhEvZ1Fauvbk3c234CBOs9gpVEmIR9PcHq/MexJu5tPwGCtSP3yd3Z8BEmYR/BWh7B2kGwcgiTsK8jQh8dv23m3vYzIFjvEawUwiIc6KgQwSqKYL2X++jubtgIi3BgJFjK67uX/QwI1nsEK4UyCfvuvubmXyqv7172MyBY7yU/uzscLsok7CNY6yNYO3If3h0OF2US9j18z8//gzc/VF7fvexnQLB2EKwMyiTsOv7Gt34pvAH3sp8CwXqPYGUQFmFfy1f++jvhDbh3/RQI1nu5T+8Oh4uwCPuaP3WCVRjBeo9gZRAWYddYr/hLc4ohWO/lPr47HC66IOwjWKdAsN5Lfnx3OUx0Qdg12iuCVQvBeo9gZdAFYRfBOgeC9V7y87vLYaILwq7hYOmK5d51r48P0TEE6x2ClUHWg30Ey4xghUsegLscJrIe7PMHS7Kvy/ogWOEIVgZVDw4QLC9VsJqLFb6v9WQPwJ0OD1UPDhAsL4IVj2BlUPXgAMGy+iBY8QhWBlUPDhAsK4KVgGBlUPXgAMGy+pAVi2C9lT4AdzssVD04QLCsCFYGgpVA1YN9470iWAIfBCsDwYqnysEBgmUlDFZrsRL2tRyCFU+VgwMEy4pgpUgfgLseBqocHCBYVgQrBcGKp8rBAYJlRbBSEKx4qhwcIFhWBCvFwQACxuLORz5VDg4QLCuClWJvADGTcecjnyoHBwiW04cyWI3FUq7lMt4NIG407n6kU+XgAMFyIlhJticQORt3P9KpcrBvolcEax7BSrI1geDZuAOSTZWDfQTL6meU2tMI1ovHCaQMxx2QbKoc7Bsq1TfVPWi2dUk/o9Setk+4k8to/qaF13QHJJsqB/u6K3VHdQ+abV3Szyi1p6Xt5Crav2nlVd0FSabKwb7eSN1T
3YNmW5f0O0vtaWk7uYaOb1p5WXdBkqlysK8vUY9EtyDZ1TX9zVJ8XNZOrqDrm1Ze2F2QZKIcHOh6m09EtyDZ1TXdDVN8XNJOFjb2TUtvwZ2QXKIc7Bt7q99E96BY1UXdT1N8XNJOljX4TUvvwZ2QXKIc7Bt8rTeiexBs6qrupyk+Lmknixr+pqV34U5ILlEO9g2/2E+iexBs6qrupyk+Lmkn65n6pAnWOFEO9k29XNE9CDZ1VQ/jFB+Xs5OFTH3LIcNxJySXKAf7pl6u6B7mF3VZD+MUH5ezk2VMfclRw3E3JJUoB/umXq7oHuYXdVkP4xQfl7STRUx9yGHTcTcklSgH+6bereYW5vd0XY/zFB+XspJVTH3IYdNxNySVJgcHpt6t5hYEe7+sx3mKj0tZySKmvuO46bgbkkqTgwNT71ZzC4K9X9bjPMXHpaxkEVPfcdx03A1JpcnBgal3q7kFwd4v63Ge4uNSVrKGqc84cDruhqTS5ODA1LvV3IJg75f1OE/taTkrWcLUVxw5HXdDUmlycGDq3UruQLH3y3oaqPa0lJUsYeorjhyPuyGpJDk4NPFmNTcgWfxVPU1Ue1rGRpYw8Q0Hj8fdkFSaHhwaf7Oa60sWf1VPE9WelrGRJYx/wtHjcTcklaYHLcZerOjiksVf1uNIpYelbGQJY99vxnjcDUklCkKr3vequq5k75f1OFPpYSkbWULvp5s4HndEMqmKMG7vtaquIVn7dT0OVXpYzkZWMFimjPm4I5JJlYTaJGu/LoIlQLBKcKckh2Tt10WwFAhWBe6U5JCs/boIlgLBqsCdkhSSrV+YslcEi2A5uVuSQrH0KyNYCtpg8bc/D3GnJIdi6VdGsCQIlp87JSkUO780giVBsPzcLUmh2PmlESwNgmXnbkkGxcqvjWBpSIMlnJC7IoncMcmgWPm1ESwNgmXnjkkGxcqvjWCJECw3d0wyKFZ+bYZeESyCFcEdkwSKjV8cwRKRBks3IndFErlrkkCx8YsjWCoEy82dk3iKjV8cwVIhWG7unIRTLPziHrZEehjBIljJ3D0Jp9j4xSmDlb+NtRAsM3dPwklWfm0ES6hisdwRyeTuSTjJyq+NYAkRLC93T8JJVn5tBEuoYrCuVCx3T6JJNn5xBEuJYFm5gxJNsvGLEwbLsIzVECwvd1GCaVZ+bQRLSRgs/o9YA9xFCaZZ+bURLCmCZeUuSjDNyq+NYEkRLKu+/Zf9XcxZNCu/NoIlRbCsOgOwWrE0K782giUlDJZqSu6IpHInJZRm4xdHsLQIlpW7KaE0G784gqVFsKzcTQml2fjFESwtgmXlbkoozcYvThes/FWsSBgs0ZjcDcnljkok0cqvjWBpESwvd1QiiVZ+bQRLjGBZuaMSSbTyayNYYsJgaebkTkgyd1UCiVZ+bY5eESyCFcZdlUCilV8bwVKrVix3QbK5sxJHtPJrI1hqBMtrqAXf8xYXRky08mu7Xw/VOSl7WJYqVqpBuQOSbagFfyMXR0ZJtfNLu9sO1Tk5e1iWJlW6QbkDkm6kBXsvQd2dYaKVX9vdi1Gdk7OHdY2UKXBQ7n6kG2lB8xtRR6iHaOXXdvcuRMck7WFdfbMIH5S7H+kGUtD/ZuQ1aqDa+aX9vQLRMVl7WFf/1x86J3c/8vWnYPQFDR08kKobzcJLTvH5m6PomLRFLGv06w8akzsf+fpTMP+u+k50Bmv6L8fy+hui6JgWmk0sq/trD52SOx/5HMHq5QzWx8rN+huh6JgWolWsSvNJq+7GnQ+D8rn6GCqWdOElhxl8SB6g82WpdrEoyQctuxt3PQwW6NV9sVrjpV34RZOluf3OdyVbxpoUn7Pubtz1cFiqV820C/+xZrI0N9/5snTbWNL816y8G3c8HOr3qkSwPtZrlujGO1+Wch8rmvuUxTfjjodF+V5VCdZiyVLddufLEq9kOcNfccTNuNthUb5XdYK1UrJkN935siL2spKhLzjqZtzt8Dhhr8KC9bFKs3R33Pm2onaziv7vN/Bm3OnwoFe9q6o5PpLubntfV+B61lBpHu50mJyuV9HB+qjeLOGt9r6v0AWtoNI03OVwqZsr538y52hXNRcJIL3P3hcWvKJ+lSbhDodLZ6oSI+bs1fGuiq4jpr3L3jeWsKZelcbgDofN3vY/v4r9mGmN9SotWB8VmyW+xd5XlrOpRpVm4O6Gzf7+37+L3ZaJDeYqNVifGg8S3VXLpfpur/O8fWnL6lJpAO5u+Gxm6iVNuymTc/eqZ1kbj1HdWdcd60/ck7mvFpUe350No60SNbSh+3tuNVwrT7C+tJyhureO+404873cjXUo9OzuahgNBisoWTO50v3XI4/d++Gfl91f4x3rT9yTvrTpCj25uxpGw8HSF2uqVv5gNZDe3+EtC67RwbC2yQo9trsaTsPBkhZrNlZrBOtD99fYNNyy6lJtPJubqdAzu6PhNB4sWbcUtVolWF/mb6jllnNHYFveNIWe2B0Np9f69Iai/+O+J6rVP/1f8TX3XG22rnZ8A203m/vsxvVNUud53c2weo1Pdyn6v+6PoescmdlQ2eImaLzT3Od2LnCSMo/rbobXS3sGUtHzZWvitGVmQ5W7G6z1NnMf2rrBSao8rDsZsT4+9v/fX8oz0orWz1oSpndmNlS7vZGa7zL3ic07fCk54XD5OCjWc3gGa3H8Sc8X6cDEhgZscJD2e8x9XPcSX0lSOTy+vqa9HzxnZ7IaWx/z5JGtJjY0aIsDtN9g7pO6l/hKstph8fU17f7iKTdJdQkwvqGxuyzVfm+5z+he4itJSofF7Wva/cn9zq/cq5RgDS+0Svt95T6fe4mvJKkdFt+f095P7neeYAXv9ayOO8p9LvcSX0lWPAx+PqfdH93t/Mq9Si+Wo1kdt5P7UO4lvpKkeBj8fk67v7pbeYKVst3Dem4m94ncS3wlSfXId/c97f3sbuUJVtaCj+i7ldzHcS/xlWT1I9v997T3O3dnZMZWdNboireYuJPkp3Ev8ZVkBSTbwwe190N3aFQGd3TW6I4fmrqR5GdxL/GVZAUk2eMHtfdLd2hUxnZUYXjPX6nuIvk53Et8IVkByfb4Qe390h0alYldnze+6w9Ut5D9DO4tvpCsgGR7+qJ2fukOjczMtk+b2PY/sjvIfgL3Fl9IWkGSPX1Rez91h0Zlat2nzez7F+EdpN+/e4svJCsgyV4+qZ3fukOjMr3xk2Y2Xnr97JsnWInSEpLr5ZPa+a07NCqKpZ8yuu7qy+fd+Q/3Fl9IWkJyvX5TOz92l0ZEtPcTBlY94Oop9/3IvcXXkVaQZK/f1M6P3aVRES7/mN49D7l8/G2/cq/xdaQVJNnGR/X+x+7QqIj3f4CxVj9Xj71pguWVV5BkBMvDV6v/Gw2WoFcEK0teQZJtfVVvf+wOjUpAA/oZQnV/6aE/RLAWkViQZJcMVo1ifTLU6nbZkT9DsJaRWJBsBKuA1Fp9Xm/gjxCsdST2Ix3BuiCCdWqJ+chHsNBA0yuClSGxHgYEC8dEvSJYCRLjYdFRLHdnZNz7vxyCtY7MeDgQLBx
R9Ypgxctsh8cFg0Wx+hCsdWSmw4RgYZesVwQrXGY4bAgW9hCsdWR2w4ZgYYeuVwQrWmY2fFqDdZ5iuRuwEmGvCFa0xGoYESy8R7DWkRgNJ4KFt5S9IljBEqPhRLDwFsFaSGI0nF6+q3c/dHdGxl2BdUh7RbBiZUbD6PW7evdLd2dk3BlYB8FaSGY1jAgW3tH2imDFyqyGEcHCOwRrIZnRcCJYeIdgLSQzGk4bH9abX7o7I+PuwCrEvSJYoVKrYdQdrA9rbBTcIVgFwVpJajWMrhcsdweWQbBWkloNJ4KFbQRrIanNsNr4srZ/+L3vBOsqCNZCcqPhRLCwSd0rghUpNxpOzcH6LhbBugiCtZLUZng1F+u27wTrIgjWQnKT4XW1YFGsJvJeEaxAucnwIlh4pe8VwQqUmwyvrW9r84e3dSdYl0CwlpLcDCuChRcBvSJYcZKTYdYZrOWL5Y7BAiJ6RbDiJBfDbOvj2vrdbd0J1vmF9IpgxclOhhfBwoOYXhGsMNnFMNv8ujZ+d1t3gnV6BGsx6cnwIli4F9QrghUmPRlercG6FYtgnR3BWkx2MNwIFu4RrMVkB8Nt+/Pa+OHXuhOsk4vqFcGKkl4MN4KFPwRrNenBcCNY+BXWK4IVJT0Ybtvf1+vvvtZ9/WBRrB1xvSJYQfKD4Xa1YFGs9wjWcvKD4fbmA3v53deyE6wTC8wVwYpiKIYZwcKX0F4RrCCGYpi9+8Kef/e17ATrvAjWghzFMGsN1lexzhAsirWNYC3IEAy3t5/Y8w8/d/0cwfr4cMehIoK1IEcxzN5/Y08//Nz1UwTrg2BtiO0VwYphSYYXwcIngrUiSzK8dj6yxx/edt1dm3kfBGsLwVrP27/2+Mz2vrKHH96W3Z2befRqS3CvCFaE5xW9hP3v7O6Ht2V352YewdpCsBb0vKKXcPCh/f3wtuzu3Mz6IFibCNaCXnb0Co6+tL9f/jtFsD6561BPZKu+uHf7lJ439BIOP7XfX/47QbBuj+TOQz2Bqbpx7/YpPe7nRRx/az+//EewTiswVTfu3T6lj+vlqiVYP0P5R7DOKjJVN+7dXkXPqP53xV61BOt7LP8I1lkFlupb4I6fSde4vOFwafnabr/8t36wvp/H3YdyQhr1IHjRT6JvWt5wuLR8bbdf/ls+WD/P4+5DNRGFehK76CfROSxvOFwaPrbvX/4jWCcVkqhHoYu+kJ159A7Lmg2f42/t55f/Vg/W7xO5A1GNNE3bpFu/qP2pdA/LWQ2j42/t96cE65zmc3QsIgBrORhM/6yM0bA6+tT+fvn5H2xZ2O8TuQNRzWSLmgRVYBUBs/IVw+xgene/JFjnNNCfAbFFKCxoUNf8t7D+1/Ff10Cwzqk7PRPEKdAepxc5os/fupphtTu9h18SrFMa2ao58iDIDpyWN5zP35qS4bU/voefLh2sv2dyF6KY+R0boO6D6LwpqYP5+rEpGVaHE7z7rbs5U/6eyF2IWiRrNkTaCMFhmhvJmcnt17ZqGLVM8ee37ubMuHscdyJqUa3aEGUops+avoPEedx+7gyHS+Mkbz92V2fc/bO4E1GLdOH6KXMxedbcxVOH8f17bzos+gbqzs64+6dwJ6IU0aqNU0dj8rzBqyaP4vsPuOth0DdRd3aGPT6GOxKVSDZtTmg6pg4fumLCHH7+hLseBp0zdYdn1ONTuCNRiWDRZqX1I+1CQwZuxV0Pg86husMz6Okp3JEoZH7P5rk7knIRxV283I67HgadU3WXZ9DTU7grUcj8os0bLJW4JYZU3V9i6JHd9TDoHLG7PIOeH8OdiTKm9k1lLlgZ//QT4ekRCFaT3im7yzPm5THcnahiauN0LtgrzSO785Gvd87u9Ix5eQx3KKqYWzqZKwVr6lmfn9idj3zd83a3Z8jrY7hLUcPE3mldoVeCUr08sjsf+brH7m7PiI3HcKeihvHtUzt7r3S1+o9g9XHXZ8DWY7hbUcHo9kU4ca+ksXp8Znc+8vWP312ffpuP4Y5FBWP7F2R2d4sSx+rpod35yNf/Btz56bf9HO5a+A0tYJy51a1InqqXh3bnI1//W3Dnp9ub53Dnwm9gBUNNrW5BAbF6eWp3P9INvAd3gHq9ew53L9wGXn2wmc0tJyRWL0/t7ke6gTfhDlCnt8/hDobbwKsPNrO51cTU6vWx3QHJNvAqgroSc+xOsC5erIE3H45c9T+3uyDJBl5GUFZCjv1HsN4YePEJThKsuF5tPLc7IbkG3kZUVkLO3enVpYM18N5T0KuB53ZHJNPA64jqSsi5e8G6crEG3nuO9XsVmKu3D+7OSJ6RNxLUlYhj94N13WKNvPYcq/cqNFd7D+4uSZKRdxLUlYhjCdamkbeeZd1exbbq+MndLUkx8mKCuhJx7H6vrlqskZeeZsVcJaSq6dndNUkw8nqCwhJx7FGwrlmskZeeZ2plHXJa1fb47p6EG3lBQWGJOJZgbRh554mG99VTs6xUNQzgkzso0UZeUVBYIo49DNb1ijXyxnONLWvbPqullappBp/cRQk29JZiwhJwakOwrlasoReea2BVOxZaJ6lR7VP45m5KqKE3FROWgFNbgnWxYg298GTde9q50fPSAtU3h2/uqEQael0xYQk4taVX1yrW0PtO17ml/SuturVsrXfpzkqcoZcWU5aAU9uCdaFiDb1ug64dHdvpubsyab9Xd1iiDL24mLLEnNrE3ZEsQ2/bomNDx5d66I6s2m/XHZYoQ+8uKC0xpzZxlyTF0Lt2ad7Pma3uupkK2m/ZHZYoQ+9PX5aYYHU8kjsmCYZetU/jes6t9dj5Pu337Q5LlLG3KE+LPVinL9bYi65ufrFHTvdpv3d3WKKMvcmYtMSc2spdlFhj77k6xWb3nmzVfP/uroQZe50xaYk5tZm7KZHGXnN1mt3uO9ar+RncXQkz9kpj0hJyaAd3VQKNvebqZOvdcaZX82O4uxJm5J1+BLUl5NAO7qoEGnzNtQ0u+NutD8/NtPbZuLsSpf8ruQlpS8ihHdxVCTT6nisbWvKn/2FUWYI0z8bdlTCjH0tMW0IObeeuSpzR11zZ0JrHtSRF83DcXQkz+rWI0xISrO6Hcmclzuhrrmto00Mikql5PO6uhBn+YLRtIVixhl9zVUPbrs5HvtbxuLMSZ/iLCWlLyKHt3FmJM/yai3KHw6V1Pu6sxBn+ZELaEnJoM3dVAg2/5qLc4XBpnY87K3HGv5mItkjPJFh3xt9zRe5u2DTOx12VQOMfTURbIs5s545KpPH3XJA7Gz6NA3JXJdD4VxMSl4gzm7mjEmr8RZfjroZR44TcVQk0/tko20Kwwo2/6Grc0TBqHZG7KoHGvxtlWwhWvPE3XYs7Gk6tM3JXJdD4h6NsC8GKN/6mS3E3w6p1SO6qBJr4dCLiEnFmM3dSYk286ULcyfBqnZK7KoEmvp2IuESc2cydlFgTb7oMdzDcGsfkjkqkia8nJC4RZ7ZyJyXYxKuuwZ0Lv8ZBuaMSaeL7EbaFYCWYeNUVuGNRQOuo3FGJNPMJRcQl4sxW7qIEm3nVdu5WlNA6LH
dUIs18RBFxCTiymbsowWZetZs7FTW0TssdlUgzX5EuLgQrwcyrdnOnoobWabmjEmnmK4qIS8SZrdxFCTbzqt3cqaihdVruqESa+Yoi4hJxZit3UYLNvGo3dypqaJ2WOyqRpj6jgLjIjiRYz6ZetZu7FSU0zsrdlFBTX1FAXGRHEqxnU6/azd2KClpn5W5KqKmvKCAusiMJ1rOpV23mbkUJrcNyNyXU1GcUEBfZkQTr2dSrNnO3ooTWYbmbEmruO9LHRXUiwXox96qt3KmooXVa7qaEmvuQ9G0RnfiPYD2be9Ne7lTU0Dotd1NCzX1I+rioTiRYz+betJc7FSU0T8vdlFCTX5I+LqITB3pFsKpyp6KG5nG5mxJq8lPS10V0IsF6NvmmndypqKF5XO6mhJr8lPR1EZ1IsJ5NvmkndypqaB6XuymhJj8lfV1EJxKsZ5Nv2shdiiKa5+VuSqjZj0leF82BBOvF7Jv2cZeihvZ5uZsSavJb0tdFdCLBejL5op3cqaihY2DuqESa/Jb0dRGdSLCeTL5oJ3cqaugYmDsqkSa/JX1dRCcSrCeTL9rIXYoiOibmjkqkyY9JXxf9ic3cTQk1+aKN3KUoomNi7qhEmvyY9HnRn9jM3ZRQky/ayF2KInpG5q5KoMmPSZ8X/YnN3E0JNfmijdylqKFrZO6qBJr9muR50RxIsJ7NvmgbdymK6JqZuyqBZj8neV40BxKsZ7Mv2sZdiiK6ZuauSqDp70mdF8l5BOvF9It2cZeiiL6hubMSZ/p7UudFct5QrwhWSe5SFNE3NHdWwsx/T+q+SM4jWC/m37SHOxRV9E3N3ZUwgi9K3BfFcQTrleBNW7hDUUTn1NxdCSP4osR9URxHsF4IXrSHuxRF9I7NHZYogi9K3RfFeQTrmeBFe7hLUUTv2NxhiSL4otR9UZxHsJ4JXrSHuxRF9I7NHZYogi9K3RfFeQTrmeBFe7hLUUTv2NxhiSL4otR9UZxHsJ4JXrSFOxRVdA/OXZYggk9K3RfFeQTrmeBFW7hDUUT/4NxlCaL4psR9ERxHsF4oXrSDuxRFDEzOnZYYim9K3BfBcQTrheJFO7hLUcTA5NxpiaH4ptR5mT+PYL0QvGcLdymKGBmduy0hFN+UOi/z5xGsF4L3bOEuRREjo3O3JYTim1LnZf48gvVC8J4t3KUoYmR07raEUHxT6rzMn0ewXgjes4M7FFUMDc8dlwiKj0qdl/nzCNYLwXt2cIeiiqHhueMSQfFRqfMyfx7BeiF4zw7uUBQxOD13XQIoPip1XubPI1jPBK/Zwl2KIgan565LAMlXpc6LqVcEqx53KYoYnJ67LgEkXxXBKm/6HZu4S1HE6PjcedGTfFUEq7zpd2ziLkURo+Nz50VP8lURrPKm37GJuxRFDM/P3Rc5yVcl7wvBUpt9xS7uUhQxPD93X+Q0n5U6LwRLbfIF27hLUcPEAN2BUdN8VtWCNfgY7qoEmnu/Pu5U1DAxQHdg1DSfFcGqbu79+rhTUcPEAN2BUdN8VvK+ECyxqddr5E5FDTMTdBdGTPNZEazqpl6vkTsVNcxMMCUiCRf5u5gCwSpu6u0auVNRw9QIMxoSf42/a0nUCtboU7irEmjm5Tq5U1HC3AhTIhJ/kb9rKcgDYwnWiYs1/mq93K0oYW6EORGJv8rftQQIVnHjr9bL3YoSJmeY0pDoizxcbJ68LwRLa3gkZu5WlDA5w5SGRF/k4WLzThKs8xZrfCRm7lgUMD3DlIYEX+TxYtNKBWviOdxdCTMxEy93LQqYnmFKQ4Iv8nixeScJ1mmLNTMTK3ctCpgfYkZDYq/xdLF58sCYgnXSYk2NxMpdiwLmh5iSkNCLvFxtlrwvrmCds1hzI3Fy16IAwRQzEhJ5jderzSJYpc2NxMldiwIEU0xJSORFXq826TzBOmWxJkdi5K5FAYIpphQk8CJbl5tzomCdsVizI/Fx18JPMsaMgIRd4831puj74gvWCZM1PxIXdy78JGPM6EfUNd5ecIY+L85gnS5ZipF4uHPhJxljRj6CrrFzxQlnC9a5kqWZiIU7F36aOSbUI+QS+5ccNl6XgGBRrGeagVi4c2EnmmNCPUIusX/JYRF1cQfrRMUSDcTCHQw30RgT6hFyif1LDiNYpYkGYuEOhptojPHxiLjC0TWHRcTFHqzzFEs1EAd3MNxEY4yPR8QVjq45rFaw+EesJ6J5WLiD4SYaY3w8Iq5wdM1hBKs00Tws3MEwU40xPh4RVzi86KCQthAsGdE8LNzFMFONMb4dEVc4vOigasHiX2x4pBmHh7sYZqoxxqcj4AoNVx0TkZaZXhGsR5pxeLiL4aWbY3g69BdoueoYglWaZhom7mR46eYYnw79FVquOoRglaaZhok7GV66OcanQ3+FlqsOKRcsTbHcoVGRDMPFnQwr4RzjyyG/QttlR5wzWO7OyCiGYeNuhpVwjgnlkF+i7bL9QspCsGQUw7BxN8NKOMeEcsgv0XbZfgSrNsUwbNzNcFLOMaEc8ku0XbYfwapNMQwfdzWMlGNMCIf6Eq3X7RZRlrleEax7imH4uKthpBxjRjjU12i9bqeQskwGS1Esd2dkBLMwclfDSDrHhHCIL9F83U4EqzbBKJzc1fDRzjEhHOJLNF+3E8GqTTAKJ3c2fLRzTOiG9hIdF+5DsGoTjMLJnQ0f7RwzuqG9RseFe8SEhWDJCEZh5e6GjXaMGd3QXqPjwj1iykKwVAST8HJ3w0U8xoxuaK/RceEeNYPF3//8Y34QZu5wuIjHmJEN6TW6rtwuKCwES2V+EGbucLio55iQDeUl+q7cjmAVNz8IM3c4XNRzzMiG8hp9V24WVBaCJTI/Bzt3OUzUY8zIhvIaXRdud95gnaNYgjlI/D9H48RprCL0ygAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Image.open('./mhp_extension/demo/demo_instance_human_mask.png')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABLAAAAOECAMAAACGszjIAAADAFBMVEUAAACAAAAAgACAgAAAAICAAIAAgICAgIBAAADAAABAgADAgABAAIDAAIBAgIDAgIAAQACAQAAAwACAwAAAQICAQIAAwICAwIBAQADAQABAwADAwABAQIDAQIBAwIDAwIAAAECAAEAAgECAgEAAAMCAAMAAgMCAgMBAAEDAAEBAgEDAgEBAAMDAAMBAgMDAgMAAQECAQEAAwECAwEAAQMCAQMAAwMCAwMBAQEDAQEBAwEDAwEBAQMDAQMBAwMDAwMAgAACgAAAggACggAAgAICgAIAggICggIBgAADgAABggADggABgAIDgAIBggIDggIAgQACgQAAgwACgwAAgQICgQIAgwICgwIBgQADgQABgwADgwABgQIDgQIBgwIDgwIAgAECgAEAggECggEAgAMCgAMAggMCggMBgAEDgAEBggEDggEBgAMDgAMBggMDggMAgQECgQEAgwECgwEAgQMCgQMAgwMCgwMBgQEDgQEBgwEDgwEBgQMDgQMBgwMDgwMAAIACAIAAAoACAoAAAIICAIIAAoICAoIBAIADAIABAoADAoABAIIDAIIBAoIDAoIAAYACAYAAA4ACA4AAAYICAYIAA4ICA4IBAYADAYABA4ADA4ABAYIDAYIBA4IDA4IAAIECAIEAAoECAoEAAIMCAIMAAoMCAoMBAIEDAIEBAoEDAoEBAIMDAIMBAoMDAoMAAYECAYEAA4ECA4EAAYMCAYMAA4MCA4MBAYEDAYEBA4EDA4EBAYMDAYMBA4MDA4MAgIACgIAAgoACgoAAgIICgIIAgoICgoIBgIADgIABgoADgoABgIIDgIIBgoIDgoIAgYACgYAAg4ACg4AAgYICgYIAg4ICg4IBgYADgYABg4ADg4ABgYIDgYIBg4IDg4IAgIECgIEAgoECgoEAgIMCgIMAgoMCgoMBgIEDgIEBgoEDgoEBgIMDgIMBgoMDgoMAgYECgYEAg4ECg4EAgYMCgYMAg4MCg4MBgYEDgYEBg4EDg4EBgYMDgYMBg4MDg4MCa7rFGAAA+R0lEQVR4nO2d225cua5FjQMEhgN07///29NVdrlu6yKJpCYljfG0sztei5wkRxzn9vEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADwzf/dUBcCAHDE/+2jLg0A4IEDW6EuAEhEqa1wFgCIqdMVzgIAGQ26QloAoKBZVzgLALpisxXKAoBuOOgKZQFAF5x8hbEAIBo3XWEsAAjG01cYCwAC8dUVxgKAONx9hbG6QNCwIgG+4o4CIXRYmQhfcTwREDxAjK84HG/IHiDOV1yNK4QP8BHpK47GD9IHuOItKU7GH/IH+MbTT1xMCAwAqph5+mYlcS/BMIIqlm5+e1vUNXlivQbOJRhmUMza3V9YYAXOt/2fC013MldSEtpzX2sKq/d/YYkITjf9nwdKr2PGoDS0Jb7cFIigKAN1iR4cd/jPFmXbMVdMGmqTXnUOhLBM+4fdbeqqQV7qJgeldAnLUXcUw+IprNX8QWsFuirUlrrJMalbxDLUPUWwcg7L9b7fVoWuzpWlbnNEWnaxAHVb7iwcxIKd7/VUqaszY6nbHJDGZTxH3Zgz6yaxYuN7HdX76lhZ6j7Ho30bz1H35sm6SSzZ93Y7Tbo6Vpa60dGwbOM56u7cWDiKJfve7qXZVwfKUnc6FsZlPEfdoBPrZkHbdyy+wlgu2NfxDHWHLqwbxppdb/dt89WusdStjoTHOp6ibtLOwlms2XWIr/gUy4zLNp6jbtPKwlms2fVm32Zf8SmWGad1PEXdp42Fs1i07a3GHXyFsYz4reMZ6k5NrJvFom2H+QphmfBcxzPUvRpYN4tF2/5AWDnx3ccT1M02s24Wi7Z94b14H2FhLAPe+3iCut1GFo5i1b4/Nlp38hXCasd9Hc9QN9zEwlGs2vdH4CdYCKudgH08Qd1xC+smsWzjH4GfYO0ZS93wAITs4wnqnusp62v/R86Bk6ie7iyNfwiENUguSmL28QR107WU9HS8iMMm0TDdORq/8Fq0n68QViNR+3iCuu1Kzhs63cRBg2gb7wydX3itGWGpidvHY9R913Hez8YqFrpL3dsxhhEP3vmV15IdhcUXsVqI3Mdj1J1XcdrNxiYe/0A6ShLGKQ/c+ZWXgj19hbAaCF3HE9S9V3DezL6wCpSlbu8A44zHbfyHl4IRlpjYfTxB3Xw5p61sbOLpZo6QhHnGozb+w2u9rsLa3gt1y6mJ3sdj1N2Xc9bJ1iIirALU/Z3xWm8HYaXPREj0Op6h7r+Usz42F/F8M/MnYR/xoI3feKnW11cIq5bwdTxB3X8pJ21sL2LBaqZPwmPIQzZ+46VaZ2Hxc8I64tfxDHUCZZw0sbOHEwjLY8Rjdn7jpVhvYW3uhbrnvHRYxxPUCZRx3MPeGlYJK2cSHiMes/MbL7W6C2trMdQ9p6XHNp6hzqCIww52t/B0MdMH4TPiETv/5blUf18hrArCV7HgTkeYzmmTCMuEus0jnisNENbGYqh7zkrcBhaNZZzp1PY6i7Aqp96Mus8jniv11xXCKidi92rmMsx4Ghre+E8nyamb3KB+/G2o+zziuVIvSR3vhbrnrDiuXNtgRhlPQ+Mb/+0sQ3WXbzTsQSPqTg94LtRkpuLDUPecFJ9dM8xllPm0dL/xH8+SVLf5Rv0ytKLu9ICnOtuEVH0Y6p6TYtsxj8EMMp+WFLb+40mi6jbfaFmLNtSdHvBUZ8PStxyGuuecGPbLazCDzKcliK3/eJKqus03GrfjNZuS76xudZ/tnnwZJw0l5kW0D2aQATVFsfv733dRt/lK64JsBXP2Aepe9ykeuIVRwlDitYqGwQwyobYoqn5rR8oUGjdkJ5exer9TPnEDo4ShxHEXWwczyI
Tastj//aR7qPt8pWlDWn5fS8bmf6kYeTsvCap7TonrLrZNJv+2XvEM4yhedZ+vuK/HSM3/Yp955Vpcvq1uOiG12+g1rLG29YpnHkcJq/t8xX8bBmr+F/8TOA7m+m110wk52ka3BT0ezQjr+uHrq6GMFbAMw/R+J+IIDoL5+ba663wcHY7fgh5MZox1/fAW1jifZYRswyC9PxB0Bzu53L6t7jodB2fjuqD7oxljXT92ovIOIGEKMeswRu8PhB3CViy/31Z3nY79q/Hdz93RDLKuH9tR+SeQL4WYfRij9wcCT+E9lt9vq7tOx+7ROO/n/mzGWNcPhOW7D2P0/kDoLbzkcv+muut07B6N73rujmaUdf3YjCoggXwpxCzEGL0/EHwN26ibzseuo3zXc4uh1vVjK6qIBPKlELQRQ/T+SPA5bKLuOR9Hnoqez1DrGiGsMY42aCWG6P2R+IN4R91zPk5kFTqfodZ1I6qQBPKlELQSQ/T+SPxBvKPuOR9ntur023rzr2uIsIb43WhBK4GwClD3nA+hrwYTVkgyIxwtwvpGICx1ywkR+mq0v3UfYfkuxQi9P9HdVwjrHaWvENYYn2YgrG8QVgKUvhpbWFER5IsBYX2DsBKAsIpZ9moR1jfdhaVuOCNKXyGsMa42ai8GaP0ZhKVHKqyx/iG2mGwGuFqE9UOvs7ih7jcjCKsYhNVBWOpeD+ksLHW7KcklLHUahwRlk/9qwxYjf+vPICw9CKuYoGjyXy3C+gFh6UFYxURFk/9qEdY3CEuPVlivK6tO4xCE5d59/tafQVhyUglLHcYxCMu/+/StP9NVWOpmkyL11bDCCgwhYQ79hKXu9ASEped7AgjrnG7CUjf6TtxmpG/9CXyVgIcBdPfVUMIK+5fpXm2g7vOdwB/KhtqAj36+SrgFSdhdnx5TGW1bY7J5kYG6zw0Q1jf8Nnc9e/vTZSqDbWtQNi8PVve5QZywxloBz8aPUXeal5396TOWobY1zuXPj1X3uUWcsP4ZaAMQVgK216fTWEYTVmAO92+o+9wkTlj/jLMB/Yyl7jMxm+vTay4j+arfFzDUjW4S5qt/ftdA3WIB7q1vo24zM51GsM1QwvpYWljR//TxEBvQ6VrUXaamzwh2GEtYvVJR97lDrLDy9v1IVOuDxSClxwz2QFhbqPvcA1912AF1g/kJH8EBQ/lqeWEF/05vdXdFhHU/UghaokdwBMLaQt3nPggrrPuBMlATPIMjhvIVv0QU+yul6t7KiOt/nAzExI7gEIS1gbrNQxZt+05cAONkICZ0BMcM5SuE9YGwEJae0BEcg7A2ULd5yKJt34kLYJwM1ITO4IixfIWwLiza9p2wAMaJQE3gCE4YylcI68KaXT8QFcBIGagJnMExQ/mKP5dxZdG27wQFMFQGYuJGcMZIvuqUk7rJM9bs+pGYBAYLQUvYCM4YylcI68qaXT9DBlpC8i9iJF8hrCtLNv0KIWiJyH++IRHIhTW7foMQlPinP+OQCOTKkk2/QwhKvNOfckoEcmXJpt8hBCXe6c85JfK4smTT75CCEuf055wSeVxZsecNSEGJb/qTTok8rqzY8xbEoMQ1/VmnRB5XVux5A1KQ4hn/rGMijisr9rwFMShxTH/eMRHHlRV73oIUlLilP/OcCOPCml2/QwpK3NKfeU5kcWXRtt8gBSVe6U89J6K4smjbb/ik8Ffdxpj4hF+DuuMWSOLCqn2/4RHCX4TVhEf2dag7boEkrqza9ysOGfxFWI04hF+HuuEmCOLCqn2/Yc7g71+M1YrDClah7rcNcriwbOOvWDNAWAY8lrACdbuNkMMHwvrlry2Cv38xVjsuW1iOut1GiOHCup0/859tDAn8/YuxTHisYTHqZlshhQ+EdeOqm9YA/iIsIx5rWIq612aI4cK6nT/y45u2AP7+xVhW7GtYjLrVdkjhA2F989dgrL8IywH7Hpai7rQdUriwcOt37sap7/9JWH/UnYyKeQ2LUXdqgBA+ENZ//Pnzt91YfxGWB+Y1LEbdqQVC+LCGMMPPgp6EVWmsvy/CwliN2NawAnWjFsjgwxjCDF+2+fMsrCpj/X0TFsZqw7SGNagbNUEEdmGNbqw/r8KqMRbC8sL4W3eLUfdpgww+TCHM8Ctj78Iqv51XX12EhbGaaPjyYRPqPo0QgSWDvxMI649BWG++QljN1P5kvBV1n0aIwJDB7UrVDZjYElbZ5bzr6ltYGKuF2p+MN6Ju0wwRtGfwK6zPT3ULzfzZFFbJ5ez6CmO1gLAKIYLWCH7P9PMJdTdV/Bim3lhbuvoVFsaqp+JTWwvqNu2Y/5S+ugEH2pvfENZI3vqzK6zjtdjWFcIyUP6prQl1m0a+V7W1+b9TCOuql9bm94X17azc3vpzIKyDtdjT1V1YGKuaoh8ozKi7tHJb1abm/04grF+5NDZ/KKzsn2n9ORbWzlbs6+rvwwPVvQ3H+Y8THqi7NHJf1YbebwGrm2jn0SuNzZ8LK7GzzoS1sRcHtnoWFsqq5OzHCR/UXRp5WtXK1h+2VN1GGy9WaWu+SFhJnfWnQFh1/MFYzTzEWHmJFaibtPK8qlWtP22puo8GXo1S1//LoRYYK5+znu3i76sx90LGY5B1FqpA3aSRt01tOtgRN/NdJ1X9v15qkbCSKevFLSHCGnAzZDwl2WSjc9Q9Wnlf1ZaDHW8tt2RSE8DbpRYKK5OyXsUSJazhlkPFc5TtUjpC3aORrVVtONjRlnJbJTUBvJ9qsbGyKOtdK2G+Gmw9VLyEaRLTDuoWrWxuav29DrWRuyIpD2DrVsuFlcJYW04JFdZAG6LiNU27n15Rd2hlb1Or73WcfdzXiKX/SwQVxtIra1Mo7aJ6XAOk1cp7oF6i+kHdoJndTa0910GW8VAixYuyd63DGGvXJvWK2loDnNXIRqL46pHDRa261hFW8UQhxV/I273WGmEJlXXkknI3HWwB0mpkK1N89cDJnlYca/5PsU4FUviFvINzrROWSFnHHjl10iEVwkq+LRo2Q0VYvxTsadmp3pdV3dIeBfoo+kLecQT5jXUqkeMhn1AprMwLI2E7VXz1g9uePiyruqdNiuRR8NernEZQK6zOyioRiH0FkFYze8F66GrgP+/7g9uePm2ruqt3yszhEcJHZmOV2sO+AlirkYNozboa+S8o+MZvT5+2Vd3WK4XecMngv0zrjdXFWRXiMG8A1mrlMF2jroYXlt+evuyrurEnSp3hk8FHo7GClVUpDeP8zcSGkZmTgC22+ju6sNzWdGth1c39UC4MpwwuL20zVpy06oVhHr+doCyyc5qxQVdjC8txT3c2Vt1hja4+3SL4aP4cK8ZabbqwT98D5yhGoCDmdl+NLCy/NT1YWWmHdZpwi+AarkVYjtIyuMI8eyeckhiGoqgbdTWwsPzW9GRpVQ3WGsItge+dMBvLQVo2UdhH74V5F4aiMO2lfOW6p2dbK+mwWg5uAdyWQmwsB0+YJ++Iy1KMQXHg9boaU1i+a1qytr07bHGDWwCewmpVlpMlzIN3xW8/clORea2uBhSW95oW7m3PF
pvE4Nb/fSlUxvKQQ3UUji/dx3VN0lK3bHW+Gk1YRWtRF1jx5nZqsdELbv0/LIWLsCqNZVRC6yr4vnYX72VJSe26VehqLGEVr0VtYqWr26HHViu4tf+0FN2N1SoC8yq4v3gP/5XJR+PanbnqeTfTU7EUjYF9o1u4T4Mg3Np/WgofYRUbq/78vXYh4s17hOxOKswLWLCa6anaCUMiso0zKcGr+9e16Gmsqgk7r0LMu3eI2R8598bsC3i+mcmp3Yn2RDRbZxSCV/Pva+HiqxJj1U64GM+hO+G7O0l4aMy+gAWbmZiWnWhORLB3dh9cinHofWMt7LVdCZlxIb5Dd8Frc/Lw1JjLvwx5tplZad2J1kR6b56HDn4Ksba+uRYe9X2eGat1yIU4D90B+94k4rUxn3/K9mwxc2LYibZE+q6ekw2eyvDYFv8SD41lmHIxrkO3Y1qbTGz25bCCp4uZEONONCXSc/m8XBAqrA7GMs65Br+hW2ldmlTstuWwg6eLmQyHnWhJpO1NDf15eeDz+0vuD8E5L4ZXlZGTrsJr6EYadiYZB1157ODJXmbCayfqA2l+VW2LXhq48Pz+rMLaM5bXsCtwm7qF2o3JxnFXHkt4vJdp8FyK2jwMr6rq0U0CV55f774YbnWGj7scr6lbqD+NPJw35bKGh3uZA+elqIvD9q7yJt0UcOXl5e6L4VZoh3kX4zf2ZtoOJAMlPbms4SvZQgvYiro4jBS26WaAb15e7bIZMfV2mXghnnNvo/1OpJT25LKHL6TKLGgtKtJwoKRRt/v/5vXFLpsRVHCnmZfhOfcmjAcjobwnlz08WkspgWtRmIXT285b9Tv/b15fG7AZfsV2m3oJroNvwHw1vanryWUR99dSmWPYSlwpisLvdSe9+h3/N2/vdN+MMGH5Zd6I0lYXPG6nH7UtuSzi7lrqkgxYhFdOg/B93VG3frf/zfv7vDfDt+rOk8+Ny/10oqEll03cWUtZmp7z3+ckB+/X7bfrd/k/vL/NeTO8y+49+sx4HVE8bS25rOL2VqoS9Rn8OYc5BLxvp13Hw/9m41Xeq+Fdd+/RZ8bzlAJp7chlFbe3UhOpfeKlHMUQ8sLNfj3v/putFzmvhnfh/YefF89jisLSkscq7mylIlXLqGvZTyHmfVv9ep79D1HCenmua839h58X95vyxtiRxy7eKS8sRRQ29k8zhveGXa/+h433uPxZwpcGfItWjD8pEYfliL0jj2X8paa0DFmY2L/MIF4b9j36H7aE5WGslwZ8i1aMPynuZ+WJS0cOy/hLZXX6MEzsHmYUzw373vyNIGG91u9cNb76xfWoXPFqyL6Nv1TXJ0/DxN5hhvHYsPPJ33ifyXajlbzW7102wrrhd1K+eHZk38cfGiqUx2Fh5y7DeGjY++JvvE1ku9FK3uoPqBuuOB2UL94tWffxRluR8kAMbN5lHPeO3S/+9/K3ojXuxXv9/mXDDw7n5ExAS8aFvNFapjyRdrbOMpBbx+4Hf7/8rWiNe/FWPsKKw3pMzgS1ZNzIHwyVykNp5f0qI/lp2P3eH9iM1rQWb+UH/YltuGC6JG8Ce7LK6oKpVnkqzfTz1S0l73t/YitaH1/FfYbYKf0haD8kb4Jb8hVWv6TdYhmBa8fe5/7MZrY+vkJYHWg8I2969CQWVlvWrsHk59Ky97m/HP9mtghrGFquyJtOLTkKq7FkdTLp8T71rePfzNbDV1HC6pj/AFTfkDv9epILqzpuz1RGwPnUN49/O1uENQiVF+RN56bcjBVQW7d0MuN861vHv5Ot3VcIqws1B+SNoCm5sC61lYbulscwON/65u1vZ+snrJCi4Ua1ZdzQdCUX1n/FFabulMVIeB/71u3vhJtVWB1SH4oW1Xgg60ovrJMC+ySUE+dj3zz+vXStvkJYfWjSjRlhVx7C6mEshxTGw/nat45/N12ENQRNvjGibatVWI/PDC2wV0YJcb72rePfjzejrxDWK83WaUbeVgphnQXv8IYB8T73jds/iDehsELTHpNm8TSibyuHsE6C93jDgDjf+8bxH8Vr8NXvcyNqhgcs8qknRVt2YYUby+P5I+J77lu3f5SvwVcIqxcW/VSToy+ElRbfc9+6/cN8rcLy7iAo5bGxKaiKLG01+cpdWEclujx/QFzPffP2j/Nt9tWvsFxbCEh4fGwOqiFNY/W+en1gcIEujx8Sz3PfvP2ThBFWesweKiVRZ02+chfWfoE+jx8Rz3PfvP2zhBt9hbD6YVdREZk6a/JVR2P5PH1EPM998/TPErYIy7sF32inwUVHZ+TqrM1X/sLaqdDp4SPid+07t3+acZuvEFY/fIx0TLbWmnwVIKztCr0ePiJ+5759++cZNwnr4ZkBRcMTPk46Il1rTb56fFxogV4PHxG/c9++/YKMG3z1KCy3FvxCnQwvLe2Tr7Uswtqs0O3hA+J17bu3XxByva8QVk+8tLRLxuZafBXxc8KNAt0ePSRe5753+wUp1/sKYfXEz0zb5OyuwVcIqwNe5753+yUp24Tl1IJHmJPi56YtkjbX4KsQYb0X6PfoEfG59v3jL0q5yVcIqxd+ctoia3f1vkJYPfA5993jL0u5zlevf+DHu2Z4xk9OG6ibsxjr4EmB9Tk+ekRcrn3/9gtTrvJVhLBMGU6Oo57eUPf2xyCsoycFFuj56AHxuPaD4y9N2fAJlksPhgTnx9NQCY9vr7haXyGsLjhc+9Hxl8bc/gkWworGU1E5b2+7vCTCeq7O9ckj4nDtR7dfnHOlrxBWP5w1lfH4Nsur9FWYsPz/rq2Rcbj2o9svzhlhpcVdVAlvb7NAhJUQh2s/vP3inOt85SuspuSWwd1UGU9vs8QqX8UJK/DJ42G/9uPjL8+5yFcIS4C/rPJd3naRNb7qIiznB4+I/dyPb7846CJfISwBAbpKeHoVwjp9QFht3g8eEPu5H99+RdAFvkJYApbw1Z/NPpMIq/xrwfNjP/fj268KuthXrsKqSGtN1vBVsbHOPzqqNP/njoeDpI5vvy7pY18hLAkLC2vDWCUfHVOb/1MHxMFSx7fvOsCthYgoGh5ZxFeFxir54J5FL4aDpY5vH2GNzyK+KvvNDSUf27XqtXCw1PHppxeWV20Ts4qwSj7FKvrQnjUvhoenjm8fYQ3PKr4qMVbRR/YseTE8PHV8+54T3NqHkKLhiVV8dW6ssg/sWPBqeHjq+PQR1vgs46tTY5V9XMd6V8NDVMen7y8s1w6cKpubhYR1/LuxCj+sX7XL4WKqw9tHWOOzkK+OP8Uq/LB+1S6Hi6kOT99zhAhLw0K+OjRW6Ud1q3VBXFx1dPruwnJtwKeu6VldWB8IKws+sjq4fYQ1AQv5aq/X4z80hrB64SOrg9P3nCHCErGSsPaarfigPnWuiY+tDk7fc4juwvIoagWWEtZOtxUf06fMNXHS1bmwHKa4+ZyQouGZpXzVZKwxGx0QJ131E5ZrA/aaFmEtYW23W/4RnapcEiddHdx+sLAsHdhLWoW1fNVgrGE7HY3xhWVowV7SKqwmrGpjjdvpYPQTlnmMe09BWPGs
5qtqY43c6lhE+ypeWO0tWCtaiNV8ZTJWvyIXZBhh7T8FYcWznrC2ey76zh1rXI9wYXl9EevgKfgqnAV9Vfe7G4bvdhRmEFZjE8aClmJFYVUZa/xuB2EUYR0+BGGFs6Cvqow1Q7tDEC8snz9OiLC0LCmsCmNN0e4QjCGsk1VAWNEs6as/Fb9YOEm/+UFYUMCqwio21iz9pifcVx7GOlsFhBXOor4qNtY8DSdnWWEZylmRZYVVaKyJGs5NB2HZ/9rR01VAWNEs66tCY03VcWaGEpZnF4ZyVmRhYVUbS1HjMowgrPNNQFjRLOyrMmNN1nJaEBYUsbKwSow1W8tZQVhQxMq+KjHWfD3npIOvdv/uvVLOFwFfhbO2sM6NNWHPKRlIWK5dtFezKGv76txYUzadEIQFRSCs4/6mbDohCAvKWNtXZ8aatet09PCVj7B822ivZlVWF9axsabtOhv9hNU+xwBhNdeyLqv76tBYM7edC4QFZSCsfWPN3XYqhhGWbxvNtSzM8r4qjUBd5dx08BXCmgKEVRqBusypQVhQBsLiU6wE9BNW6xyLPhhhhYOv/hSGoC5yahAWFIKwEJaeDr7KJ6zGShYHYf0pC0Fd49T0EJbNWAgrCQjrwrqd52AQYfm20VbI6iCsb9btPAM9fGX7558RVhYQ1jfrdp6A9MIq/EiEFQ/C+mHdzvUgLCgFYd1Yt3M5XYXVMkqElQaEdWPdzuX08BXCmgOE9cu6navJLqziD0RY4SCsO+t2LqaLsAxfxEJYiUBYd9btXEtyYZV/IL6KB2E9sm7nSvoKq3qWCCsTCOuJdTsX0sVXzZ9i1ewAwgoHYT2zbudC+gir8U/nIKxUIKwX1u1cxxDC8m2ksgi4gbBeWbdzGZ2E1fbXjiKsVCCsN9btXEUnX8ULq7yTyiLgBsJ6Z93ORWQWVt0KIKxoENY763YuopewWv795xhh1dUAdxDWBss2LqKvsOqmWbcCCCsahLXBso2LQFhQCsLaYtnGNfTylUFYzp1U1g2/IKxNlm1cAsKCUhDWJss2LqGbsBqMhbBygbC2WbZxBfmF5d1Jbd1wA2HtsGzjAhAWlIKw9li28f5081XDHyes+/74KhyEtcOyjfcnsbAqvz/CCgdh7bBs4/3pLqzygSKsbCCsPZZtvDvdfIWwxgdh7bFs493pL6ziiYb4CmEZQFi7LNt4b/IKK+YTLIRlAGHtsmzjvVlNWPV1wy8Ia59lG+9MN1/VTrR2/AgrHoS1z7KNd2YWYQUWDr8grH2Wbbwv/YRVZ6za8SOsHiCsfZZtvC9JhVW99/iqBwjrgHU77wnCgnIQ1gHrdt6Tfr6qkVD93iOsLiCsfdbtvCcKYZ2PFGElBWHts3Dr/WjzVT9heTfSVDj8grD2Wbj1fmiEdTbShrVHWF1AWPus3Hs3FhNWU91wB2EdsHTznejoq/e/SHZvv1vWHmF1AWEdsHTznegorPIdb9l7hNUHhLXP0s13IpWw3j/v8v4nc5oKhwcQ1j5rd9+HZMJq3nuE1QmEtc/a3XehzVe23+ceYix81QmEtc/i7fegn68Q1hwgrH0Wb78H/YTV6iuElQuEtc/i7fcAYUEdCGuf1fvvQDdftQurbPII6wynE0JY+6zefwcQ1jIgrHCWDyCcJl8hrBHxuiCEtc/yAYTTz1cIS4zbBSGsXZYPIJzFhNVW+RwgrHiWDyCcfr5CWFr8Lghh7bJ8AOEgrFVAWB1YPoBw+vkKYWnxOyGEtcvyAYQzj7CKWmmtfQIQVg+WDyCaEXyFsBxwPCGEtc/yAQRT7yuENSaeJ4Swdlk+gGAQ1iogrC4sH0Aw/XyVQljNxY8PwurC8gEEM5OwCpppLn58EFYXlg8gGIS1CgirCycBrBqLG/18hbC0IKwuHAWwdjI+TCWs827aqx8d30tBWLvsBUA0LiCsRUBYndhOgGycGMJXCMsOwurEVgJk4wbCWgTfQ0FYuzwnQDjOdPNVF2GdtmOof3B878R5bBPhv9PwCMJaBN874SZ3CNhpeARhLYLvoXCT20TsNDwymbBO+jGUPzjOh8JNbhG00/BAN18hLCnOl8JNPhC90/BAN2G1TbV+uAhrE+dT4SbvxO80/FLnK4Q1LM6nwk3e6LHT8AvCWgTnU+Em/xhXevZwgkBYi+B8K2vfpGmXZw8nlvmEddiSof7Bcb6VlW/StMmzhxMNwloE51tZ+CZNizx9OtF08xXC0uJ8KwufpGmRp08nmgmFddCTpf7Bcb6VdU/StMfTpxMOwloE51tZ9yRNezx9OuEgrEVwvpVlT9K0xtOnE083XyEsLb63suxJmrZ4+nQ6MKOw9psyNTA2vsey7Ematnj+eOIpt9X//me8936TRVjv+N7Kqhdp2OEV4ulAoau+Mb6r22QR1ju+t7LqRbav8BLxdKBCV3Zj9fqLOBDWBq63supFtu3vMvF0oMpXdmFdiZ8rwnrH9VZWvcja1V0snniqdOUlrA2cx4qw3nG9lVUvstFMy+QTTpWuIo3lC8J6B2E5gLDEVPpqcGGpy5KCsDxAWFoqfTWKsRDWOwjLA4SlpVJXoxgLYb3jeinLHiTC0lLtqzGUha/eQVge+AprwoCCafDVEMpCWG8gLBcQlpQmX41qLHVNWhCWCwhLSpuvBjDWv98grF8Qlg8IS0mjr9Ib699f8NUPCMsHV2FNmVAkrb7Kbax/n0BYVxCWDwhLSbOvEhvr3zcQ1h+E5QbCEtLuq6zGetfVj7HUhakR+GrOc0RYQgy+SmmsTV19G0tdmhqE5YSrsOaMKAyTr/IZa09XF2Gpa5ODsLxAWDJsvsomrH1f/WcsdXFyEJYXCEuGzVe5jHWkq3//VVcnx/VK1r5GhCVjImEd+wpheV7J6teIsFQYfZXJWAjrGITlCMYSYfVVHmGd+AphISxHEJYG8ydYaYR15iuEhbAcQVgaENY6ICxPEJaEZb6ChbA8hcUxIiwN0/wSIcI6BWF54iiseUPyxyAsdemvIKwTEJYrCEtBq7DUdW+AsE5AWK4gLAGtX3NPGTHCOgZhuYKwBDT/ImHCjPkM6wSE5YqjsCZOyZnPZmPl+0khwjoBYfmCsPrzKqyRjYWwTkBYviCs/liElcxYZ75CWAjLF4TVnTdfjWusU18hLL8T4RQvOApr5pg82RDWoD8pxFfnICxfEFZ3jMLKY6xzXyEshOUNwurNlrBGNFaBrxAWwvLGUVhT5+THprDGM1aJr+qF9fUVUKoSha+mPkSE1ZltXyGsK19fsxkLYXmDsfqyI6zBjFWkq2phfX1NZyyE5Q3C6suesKqMJc87RFhfN2JqluB1HtzhDS9ZTR+UE57CkkUe+QnWXMbyug7u8IaPqhYIyoldYdUYSzyEQl+1foI1k7K8BsMd/tJipiWD8iFcWB0m0kFYsyjLaRbc4Z26LBYOyod9YVX8zVjayZT6qk5YX++4Vq3BaQTc4Z367V8zJyc0wiocUNkHdRPWf8oafamcroNDvNO6/YvF5MWBsIqN5T+rmu8d46ttYX2NvldO52Gb7mRUb/uSKXlxJKx
SYzlNrHG4HT/Bugpr6N1yuo/2aU2I/0rDPnZh+cyrabxXhwiENe56+dxH07CmxXGh4ZRDYZUYy2Vc9Tw4RCGsUTfMp/zKWTnVnhWPdVb3MA7Hwjo3lse0GniUiEZYYy6ZT/GVw3KqPSv2bVZ3MBJGYdmH1cZXvbGqYtnx1ddLGUFDCcOp8Mph+dSeF9sqq6sfjBNhnRjLNioDzxqRCWuwdfMqu3JYPsXnpXmL1YWPyJmwDo3VPCkzLx7RCWukrXMrunJYPtXnpWmD1UWPikVYTYNy4U0kQmF9jLJ8fhVXTsun/LzU76+64oE5Fda+sern5MWGSfr4akdYHyOsoF+1tePyqT8x5NGPc2HtGat2bf3YMolaWB/Z99Cx1Np5+TSQGNLoR4GwNo1Vu7SObKtEL6yPxMvoWmftwHxaSAxJ9KNEWJ/nqiq5Zid2VNLFV+ctBo3JiG+VtRPz6SExxNCNIl89GmvzbMvv2c6uS3II6yPjYjqXWDsynyYSQwbdKBTW55OvXg63+qIN7Kskj7AunL6i8Lt50FJe5fOMYYzO8gH0o1RYnz8/G3y73eaTbuLAV8fGqkvFLqyP4y0t/G4eVNfW9sTWJKZg8fZ7Uiysz89r+IfGCDbW6ZsfBPUqsKpQHJvbenzJ93Fhv17TY6sH59ROYlbuvS8BwgpSVtGbf2X1IrC6UNxbuz/69Dv4cFax6eGG9mdl3c47U+GrCmH5G6vstW9kEVYBTgPdiMv9bareErNo2/0JEpbrXTfK6sqPrz6rQunV2CuWQR4EFvmq+MbGYMWeJUQJy+28KwW1zecYwrpSO8HzyDzecvxG757GY72ONdT46nPrGM6oX+5Hat+2i5+wXPoq417Qxv+3+1FlxVqWxtLJrKzWr4pwYbVedvV7TqgyVmxrsRRWalma+qIsbxuExdpVES+surtueHwZEcKq7K0LpWValqa+KsvbRmGpZmV8fMQLq/iqW55dSo2wqh5cf72RFFdp2plqDG8DeOCjxlgHB2G+6sYHl1MRSu2jGy44iPIabTtTi+FtAHeu29RBWEenbXxkKRWptDy+4YoDKC/QuDOVtL8N4IHrNnUUlpCKVJrfYdSNnfLabCtTS/PbAB743qYKYTWfsp6KWEzvsRnHSHldxp2ppPltAA/8rBPCesbjdSbvtFJRkXFlKml9G8ADt3UqFpbHJcsoz8XphRb1tFFRjnVn6mh9G8Cd33VCWM+4vdLgnhZqirHuTB2NbwO487BPCOsJx5e2uqeBulLMO1NF29sA7jzuU6GwHA9ZQHk0nm9tPfESDJU4LE0Nja8D+OVpoRDWI66vbb3xU0yFOOxMDW2vA/jleaEQ1gPub26+83e8qnDYmRqaXgdw53mhENYDES9vv/UnvEpwWJkqmt4HcOdloxDWnZC3G679jlsFDhtTRcv7AB542agFhFVsrJi3W+79imMFDgtTR8sLAe68rRTC+iXq/ZaLd32/y8JU0fJGgDtvK4WwfgkroPXcvV/vsC6V1L8R4JH3nUJYN+IqaDj1gLd7bEsl9a8EeOR9p+YXVqmx4gqovfOQ13ssSy317wR4ZGOpENY3kRUIbXV7u8uuICzoC8LaJbQEna2+WoXl4CuEBUa2tgphXQkuQiCqx1e7bArCgs4sKawyY/UoRGCr79f6bArCgt4grD061tPVVpf3uewJwoL+IKxd1FXGgbBgVJYUVrpPsTrjsiYICwQgrF3URcbhsSUICyQ0GEt9b3aKklEXGYfHkiAskICw9lAXGYfDjiAsEIGwdlAXGYd9RRAWyEBY26iLDMO8IAgrG//+q66gJwhrE3WRYVj3A2Gl4t9v1GV0ZD1hfZbEoi4yDON6IKw8/HtHXUpHENYm6iKjMG4HwsrBv6+oC+oHwtpEXWQUxu1AWHreZIWwENaswjIuB8JSsmkqhIWwLqiLDMK4HAhLxZGs1hbWmbHUJ2enSFhzGsu6GwhLw5muFhLW+14hrCvqKkMwLwfCUnDuK4Q1s7DW/TmhdTcQloICXyEshDUh5t1AWAoQ1gMIawd1lRGYdwNhKcBXD2wsFsK6oK4yAvNqICwBfIJ1Z/NfQjgWVpe/dzyUz2WNVbccCCsHCOvGZYfXE9YnwioDYeUAX125LTHCOgloHio3BGGlgE+wLty3eGOz5hfWWsZq3GuElQKE9efpFhFWQUjj0r7Y3r5CWG3gq+dLRFhFMQ2JabcRVg5WF9bbVlcaax1hDW4s63IjrBysLayNxV5SWLN/imVfb3dfIawmlvbV5m4jrKq4RsBhwf19hbCaWFhYO9u9tVsI6yCw7LisOMJKwrrC2ttvhFWbWG4KNvx8yQN8hbCaWNVXBxteKazRjfVZbCx1pU2U+epk0SN8hbCaWFJYxyu+tVwI6zS2lJTr6mjTQ3yFsJpYT1inS46wWpNLR5Wvdnc9xlcIqwXLjzsjUrLlm9s1vbAKjKWutJpaX+1sO8LKw1LCKlxzhGXLLw8NwtrY9yBfIawW1vFV+ZojLHOEOWjy1fvGI6xErCKsmj1HWA4hpqDRWC87j7ASsYKwatd8e72mFdb2V93/98PDt9YR1tPWR/kKYTVQ/aPNcLTs+ZrC+vyV1B7qQuvxMBbCSsTkvmpc80WFdearAY3VLqzfzQ/zFcJqYGZhta/59n5NK6yvUl8tZqzr7sf5CmE1MK2wTFu+mrC+5hWW2VgIKxVTCsu85DsLNrewSnw1orFMygrUFcJqoOwHmZFwWXGENZOwDMoK9RXCqmcqYfkt+N6GIawxhdWsLISVDBdfPS5GeMWnFXiwnLC+ioW1lrEQVjIchLW1HR0qP3q9md0Vm1dYHx9zC6vNWAgrGXZh7W9IcOmRy72/Y7MK66NYWCsZK9ZXCKuagpmdPKFoVVxrDl7rKwgLYSGshDQJ67K0t//dtjrVdfpu7jkHSzansD4QFsIagKKZPX7AfW0v31KvYBhHWzarsEp9tZCxgn2FsGopHNrtuz+v7by6OhbWm7IQ1iAgrNEpHtrlO8+ytiWcLNpswvpAWAhrBCqGNs/alnC2abMJa3vCswmr1liRrrqivv/RqJjaTGtbwOmqTSWs3REjrFjUAhiN8qnNtLUlnO8awhoQhDU0xVOba2tLKFi2hYU17OyT+QphFfITVenY5lraIkq2DWGNB8IakVtcRVObb2lLKNm2aYT10w/CQlgpuaVVNrXpdraIkm2bRVi3fhBWb18hrBJuYRWObbqdLaJg2ab5GtatIYSFsEQc5HEPC2EdcL5r0/wq4W9HCwirxlcIqw/HqTz83zZfDbuzZZzvGsIakGS+Qlh7/8LHxn8uHNtsK1vK2ao9/cZRdbEWfjuq8NWw059cWJfROD8ylKqsENYhJ+l9IqwBqfFVF2F5Kus2G78nhlIbVPnYplrZYo7T+0RYI5LPV9+Yr/9pOOl/otkSUc3YZlrZYg7T+0RYQ5JVWN80nv/7dIwPDMAeDsI64Ti+eYR172l+YdX4SiCsj2rF7I2n9XkhuARTNbWJVraY0wQfhaUu1sK9I4Ql99WVMg0cjqf2YWF4ZVI3tY
lWtpiSFGcQ1kM7NcIacvw1vhIK6+NYM0XjKXxWIK55VE5tmo2toDDJ0YX12MvswqrylVZY25qpGM/ps4Zx1YXaoc2ysRXUBaqutp3HLiYX1lC+ulqmaiBHxvp53oCqulI9tVlWtoK6RNXVNvPcxtTCqvOVXliVP4CcCuuFUWT10eArhHWKutxWnruYWViVvhpeWKfGeiCpqH5oGNocK1tFZajqcht56WJiYQ3oq37C2iGBrD6qfIWwilGX28hLF/MKq9ZXCOsXgap+hnb9301Dm2FlK2mLeDRe25hVWEP6yigsN2N15nFuHwirEEvK4/DWxqTCGtNXwV91z4lpdr9PmWBnKzHnPARvbcwprGpfIaz+OAzv/rDxd7YWt7xT897GjMIa1lcmY6lLL8ZvevdnDr+z1XjHnpKNNiYUVr2v8gir3Vjqwgtxnd/9scMvbTUh0Wdjq43phDW0r5qNpS67BO8BPjx69KWtJyz+RGy2MZuwGnyVSlhtxlIXfU7ACB+ePvjSNhA6gSRs9zGVsVp0lctXTcZSl3xCzBQf3zD00jYRPwU5O31MJKwmXaUTVr2x1AWfEDTIp3eMu7SNdJmDlr0+ZhFWo67S+araWOpyj4ma5fNbRl3aZnpNQsduH3MIq1VX4wtLXe0xYeN8ec+YW9tOv1GcFRLz2KMOZxDWTL6qMpa61GPiBvr2qhG31kDXYRzWEfLYr8mFNZevaoylrvSQwJm+v2y8rbXQexoHdYQ896jB8YU1m6/KjaWu85DAqW6+b7CtNdF/HLt1hDz3sMHRhdXuq7TCKpyJusojQge7886RttaGaCJbdUQ89qTBsYU1o6/KRqIu8oDY2R68eJi1tSGcyWsdEY+dWVhT+qpoJuoSd4ge71fVNvtWkwb9YH7riHjsWX8DC2tSX52PRF3gBuEzvv0fJ3UMsLZWsownaupnvSCsdKCrA85qSb+2VrIMKGru5QMeTVjtxmqZeUeadaWxWd+hn5aTfm2NZBlR1OTLBzyar2b5E4TvtH1yVfBdAug99POK0u+tjSxTipp9+YAXEVbTwPvS9MlVkdR8UQy9pK7ke2sjy6Si5l88X4SVh9pPrj7Kv6cTqpmXVZd7b21kGVfQEpTPdzxhjf939u1SZavn7x5dmnbkpVVmXlsbWYYWtAql4x3RV0P/qxMnVBmoUm/tqOddc67qSqPIMriwpxYxqLCG/Hfpy6jQz+vEgipSz/rKYOUGkGV2MY8t7mhMX1Uaq2nWKord8z4z/2LUY/5lwJKdyTK/kMdWtLSAsJpGraPZV77GUk/4mVHr9iPLFEOeWtPSmMJq+rehJmLvxxmfp6uH+87ItfuQZJKBTy1lTGGVGqttztnZ85XdWOqx7jB6/XaSjDPwqcUM6asyYzWOOTv7vjIpSz3RA2bowUaSkYY8tbqrIYV1bqyv1jFn51BYjc5ST/OYSdow0DLTgDziHlrZ1IDCOjHW9bs0jjk3Z77aUdZuGOIxFlCejbrSKOq3JCaPuIdWNzWesI6M9fM9WuecmQJfvTvrJQ/VxBopzkZdaBityxJTR8hD65sazVdXDmy1tK+elSUckAfF4agLDaN1W2LqiHhmU1PD6eqbHV3NKKxyXT04SzYYL4rjURcaRvPChNQR8czGnsaz1ZVNXU0orEpffUtLNBNHSuNR1xlH88aElBHyUFVPIt5t9TWfsFp8NeBnzG+U5qOuM47mlQkpI+ShopZS0TzmpCCsY9R1xtG+MxFluD4TYT3QPueMtPlqHWGpywykfWkiyoh4pqijZLTPOSGNvkJYE9C+NSFlRDxT01E22gedjlZfIawJaF+bkDIinqnpKBvtg85Gs6/GF1ZpROo6A2nfm5AyIp6p6Sgd7ZPOxcK+QlgIa5Exf00jrHZfIawZMKxORBkRz5Q0lA/DpBNh8BXCmgHD7kSUEfFMSUP5MEw6DRZdTeArvuaeTlieD5U2lBDDqHNg0tUMvkJYCGuVOX8NLyybrlby1dSLbFmhiDIinsmcr1hGLceoqyl8hbC+ZhaWtJ+MWEatxuorhDULli2KKCPgkYp+MmIZtRp0daE0LXWdkVi2KKKKiGcy5yuWUavBVxdK01LXGYlliyKqiHgmc75iGbUafHWhNC11nZGY1iigCrdHIqxXTKNWg6+++F0NF0xbFFCF2yMR1iumUatBV3yCdcW0RQFVuD0SYb1iGrWYt38vckFfIawLxjVyr8LtkQjrFdOoxXy1GUsduS8VYc2LeY+cq/B6IsJ6wzZqKd8NLO4rhHXBY5E8i3B64hfCesU2aS23Hha21RfCuuKzSH5VeD0RYb1im7SWexfr6op/RPWK2yZ5VeH0xJbGvF6dE+OklTy2saquENY3jqvkU4XTExHWK8ZJK3luZEVZXWiMazJcV8mjCqcnIqxXjJNW8trKgrb6QljfOK+SvQqnJyKsV4yTFrLZzlqyumDLaxZClslShc8DEdYb1knrUCeXA/K6kmOXAtLWNZMT46CVqKPLAYFdybFLAWHrmsmJcdBK1NHlgMCu5NilgLB1zeTEOGgh6uSSQGJXcixT6BOZ8wXjoIWok0sCiV3JsUyhT2TOF4yDFqJOLglEdiXHMoU+kTFfMA5aiDq5HBDZNzm2yT9rXS9JsQ5ahjq4JJDZNznWyT9rXS9JsQ5ahjq4JJDZNzn2yT9rWStZMQ9ahTq4JBDaNzn2yT1qXStZMQ9ahTq4JBDalST75B61rpWs2CetQZ1bFkjtSpKNco9a1klaHCYtQZ1bEkjtmyQb5R61rJOsOAxagzq4JBDbN0k2yj1qWSdZcRi0BnVwSSC2b5JslHvUsk6y4jBoDergkkBs3yTZKPeoZZ1kxWHQGtTBJYHYvkmyUe5RyzrJisOgJahzywLBfZNkpdyTlnWSFYdBS1DnlgSC+yHJTrknrWokLR6DVqDOLQkk90OSnXIPWtVIWjwGrUCdWxJI7occO+WftKiRvDjMWYI6tyQQ3Q85dso/aFEjeXGYswR1bkkguh9y7JR/0KJG8uIwZwnq3JJAdD/k2Cn/oEWN5MVhzgrUsWWB8H7IsVT+OYsayYvDnBWoY8sC4f2QY6n8cxY1kheHOStQx5YE0ruRY6n8YxY1khaHMUtQ55YE0ruRY6vccxb1kRfzjEWoc0sC6d3IsVXuOYv6yIt5xiLUuSWB+G7k2Cr3mEV95MU8YxHq3JJAfDdybJV7zKI+8mKesQh1bkkgvxs5tir8gZ36yIt1xCrUuSWB/G6kWCv/mCVtZMY4YBnq3HJAgL+kWCv3lDVtZMY2Xx3q3HJAgL+kWCv3lDVtZMY2Xx3q3HJAgL+kWKsOD+zRRmZM4xWizi0HJPhLirXq8MAebWTGNF4h6txykDzBrmPKsFfuKUu6SI1pukLUueUgeYQ9x5Rir+Kf16OL1FiGq0SdWwqyR9hzTi5bFSAYTVO2NjLTPlot6txSkD3CnoPyWCpzud4PFLWRmfbRalHnloLkGXYdlH2l7OV2eWB8G6lpjkSMOrcUJM+w66DsK2Uvt8sDO/SRmfZIxKiDS0D2DLsOyhyGQ
7XeD1T1kRlDJlrUwSUge4ZdB2UOwyGTHs/r0UdmLJlIUQeXgOwhdh2UPQx7ud4PlDWSF1MkUtTJJSB5iH0nZQ/DXG2nB3boJC+2SJSok0tA8hT7TsohDGu1nR7YoZO82CJRok4uAclT7DsqhzCsxXZ6YI9W0mKMRIg6uQTkTrHzqBzCsBbb6YFdesmKNRId6uT05I6x96hc0rAV6/5EaTc5sUeiQp2cntQxdh+VSxqmYvs9sU8/KfGIRIM6OT2ZY+w/Kpc0TMX2e2KvjvLhk4gEdXR6EucoGJVPGhFfcZI3ZSggGz6BSFBHJydzjoJR6eMIeKa+qWQ4BSJBnZ2azDEKJqWPI+CZ+qaS4RSIBHV2ahLHqBiUPI6+z+zVVTK8AlGgzk5N4hgVg5LHERKxvKtkOOUhQZ2dmsQxKgYljyMkYnlXyXDKQ4I6OzGZY5QMSp1G54f2aisXTnlIUGcnJnOMkkGp04iJ2KWreW7FJw4N6uzEJI5RMyh1GjEJu3Q1z634xKFBnZ2WzDlqBqUOIyZgl66muRWfNESow9OSOUfNpNRhxATs0tU0t+KThgh1eFoy56iZlDqMoIDVbaXCJQwV6vCkZM5RNCl1GEEBi7vKhUcYMtThScmco2hU6iyCAla3lQqPMGSow5OSOUfRqNRZBAWsbisVHmHIUIenJHWOolGpswgKWN1WKjzC0KFOT0jmGFWTUmcRlK+6rVR4hKFDnZ6QzDGqRqWOIipfdV+ZcMhCiDo9IZlzVI1KnURUvOq+EuEQhRJ1fDpS56galTqJqHjVfSXCIQol6vh0ZM5RNip1ElHxqvtKhEMUStTx6cico2xW6iCi4rX3Nc2lOEQhRZ2fjMwxykYlDyIoXntfsxyKQxJa1AGqSB2jbFTyIKLilTeWBXsQYtQBqsgco25U8hyiWjc3Nsuh2IMQow5QReYcdaOSxxDVurmxWQ7FHoQYdYAqMueom5U+hq6P7dpZCuw5yFFHKCJzjLJRJYih62P7tpYBhxxc+H+K+Be4j8BiOAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Image.open('./mhp_extension/demo/demo_global_human_parsing.png')" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABLAAAAOECAMAAACGszjIAAADAFBMVEUAAACAAAAAgACAgAAAAICAAIAAgICAgIBAAADAAABAgADAgABAAIDAAIBAgIDAgIAAQACAQAAAwACAwAAAQICAQIAAwICAwIBAQADAQABAwADAwABAQIDAQIBAwIDAwIAAAECAAEAAgECAgEAAAMCAAMAAgMCAgMBAAEDAAEBAgEDAgEBAAMDAAMBAgMDAgMAAQECAQEAAwECAwEAAQMCAQMAAwMCAwMBAQEDAQEBAwEDAwEBAQMDAQMBAwMDAwMAgAACgAAAggACggAAgAICgAIAggICggIBgAADgAABggADggABgAIDgAIBggIDggIAgQACgQAAgwACgwAAgQICgQIAgwICgwIBgQADgQABgwADgwABgQIDgQIBgwIDgwIAgAECgAEAggECggEAgAMCgAMAggMCggMBgAEDgAEBggEDggEBgAMDgAMBggMDggMAgQECgQEAgwECgwEAgQMCgQMAgwMCgwMBgQEDgQEBgwEDgwEBgQMDgQMBgwMDgwMAAIACAIAAAoACAoAAAIICAIIAAoICAoIBAIADAIABAoADAoABAIIDAIIBAoIDAoIAAYACAYAAA4ACA4AAAYICAYIAA4ICA4IBAYADAYABA4ADA4ABAYIDAYIBA4IDA4IAAIECAIEAAoECAoEAAIMCAIMAAoMCAoMBAIEDAIEBAoEDAoEBAIMDAIMBAoMDAoMAAYECAYEAA4ECA4EAAYMCAYMAA4MCA4MBAYEDAYEBA4EDA4EBAYMDAYMBA4MDA4MAgIACgIAAgoACgoAAgIICgIIAgoICgoIBgIADgIABgoADgoABgIIDgIIBgoIDgoIAgYACgYAAg4ACg4AAgYICgYIAg4ICg4IBgYADgYABg4ADg4ABgYIDgYIBg4IDg4IAgIECgIEAgoECgoEAgIMCgIMAgoMCgoMBgIEDgIEBgoEDgoEBgIMDgIMBgoMDgoMAgYECgYEAg4ECg4EAgYMCgYMAg4MCg4MBgYEDgYEBg4EDg4EBgYMDgYMBg4MDg4MCa7rFGAAA/tUlEQVR4nO3d7XrbtpaGYe003Z00badOstux286c/1lOJFm2RBIkPtZaL0A+z68mtUAAJO5LctP4dCIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIiIrr2X7fUEyEiWuu/0qmnRkR014pW0EVEHZWrFWYRkbgyrjCLiGRVcAVaRKSomivMIqLQ2rSCLCIKy4AryCKikIy8Qiwi8s6MK8QiIucsvUIsInLMlivEIiK/zL1CrJD+9T31HIiic/AKsBz7Vyr1xIgC8vAKsTxKSoVadJh8vEIs67K0Ai3aeV5eIZZpJVxBFu02P68Qy65SriCLdhpg9V8NV5BFe8zTK8QyqZYryDpov99ST8QhX68Aq70Wrg5I1v88pJ5NeL8vpJ6TZc5eIVZrrVwdiKz/WU49rcCWtNqZWdvi/HoOsDRZcHUEshJWHQqttFY7IiuPq9cQKzorr3Yu1hZXR0Brg6u9kJWvVTVb6i
WOmx1X+yYrz6s9o5Wh1T7IKueqAi/1IgfNlqv9klXA1R7NyrVqH2K1cZXJlnqRY2bv1T7FKvVqT2aVYbUDsky42iZLvcwR8/Bqj2TVeLUTs2q4GlwsI662xFIvc8CcvNqdWLVejU9WJVdDi2Xn1TpZ6nWOl5tXOyOrwavByar3amCxDLlaJ0u90NHy9GpHYrVxNTRZLV4NS5axVytkqVc6Vr5c7Uesdq9GJauRq1HFMvcKsUxyB2sfYpl4NSRZ7V6NKZa9V0mx1EsdKX+vdkGWlVcDimUB1u/qRZTn4RVvsZoL8Wp8sey8Gk4sE68GFMvDK95iNRcE1uBiWXo1mFhGXo1HlotXiNVYlFeDi2UL1khi2Xk1mFhOXgFWU3FeDS2WsVcDiWXp1VhiAVaPRYI1rljmXg0jlq1XQ4nlBRZiNRTq1bBiOXh1VLB+Vy8oPy+vAKu+YK8GFcvDq0HEMvdqHLHc3mABVn3hYA0p1nHBcvBqGLHcvEqJpV7wAMV7NaJYef78888/+xPLBazf1avKKxwsxNpMAdZwYmVq9c8OxfLxahCx/LwCrMokXg0nVjZXxWKpV7aVl1djiAVYvSXyajCwSrx6EyvTLvXa1vPzagixHMHim1g1qbwaTKwir/55+K3BxfIE63f14rZz9AqwKtJ5NZRY2+ikwcogS728lVy9+l29uu0Aq6+UYA0kVplX/0x/a1yxfL3qXyzPT4QJsdRL7jqpVzsC6x/AAiwjsBArndarccQq9GoG1rBieXvVvViuXgFWaYCVV6FXc7A2xVKvMBFguYLFZ8Ky1F6NIlapV7sBy9+r3sVyBmtRLPWa+03N1T7AmntVAVafYkWA9bt6kat5g7UklnrN3abW6px6D7Iq9WoBrCHfYoV49bt6lat5ewVYBblr9OOPP+5BrOI3WIC1G7HcwVoQS73mXnOFatpuwVryaidgBXn1u3qdawFWPwVRNb5YxV5deVpCDLAAC7AqC3FqWyz1NmxX7lUFWP2JFeVV12L5gzUXS73mToujavC3WBVeAdYuwXLxCrAyi4Jq+LdYRmAN95kQsE4hYM3EUq+5z2KxWhNLvRNbVXj1z9K/PBZY/7k1tliA1UvRWo37FqvCq7NOy2+7xgHLAqtctNRrTRcB1q94tV08V6O+xarh6jtOy2+7DgDWTKsMstRrTRfhFWBlJOBqX2Cte7UI1rpY6nVOs+Rqkyz1YpPFg/Xrr+o1d5mAq6RY6r1Yr8qrFGM7BitNVYZY6sUmCwfr/Gv1ojtMoVVSLPVmrAdYFTiViqVebLIIr36dXkS96A7TcDXiWyxLr4YSy1irdbHUi00WAtav02uoV91fEq0Aa1Us9Uofs+dqTSz1apPFgnX7tXrV3SXiKiWWejvWsvVqTSz1Sh/z8Cotlnq1yUK8+nV6CfWqu0vF1U7AavBqRSz1Sh9z8QqwAKsqmVfLYqm3Yy3AMvQKsNbEev+letXdJeNqF2A1eZUWS73Sx1y8Aqys1IvuL51XOwCr0atDg5USS73adIDVQzKulsVSb8da9mClxFKv9DEfrwALsGoCrOzsvQIswAKssoReAVZKLPVKH/PxCrAAqyahV4OB5eEVYAEWXhUl9GpJLPV2rAVYpmAlxFKvdiXeYHWQ0ivAGvSbWIAFWKqUXo0NloVXg77FAizAUgVY2QEWYPEtLHVKrwDr0GAti6Ve7FqApU8K1lws9Xas5eFVQiz1Uh8DrNei32Kp19tjgJUdYAWApV7rasFgqZfbZX2Bpd6N1XzAWhJLvdJJc1b8xFKvdTXA0gdY2fl4BViABVj5AVZ2TmAtiKVe6TTAugZY+rRgTcVS78ZqgGUN1oJY6qWuB1jyugJLvRnrAZY5WHOx1EtdLxQs9WI7TerVsGAZerUglnql0+LAUq90I8DSd5UJsLYLA0u90HluYE3FUi90I7zqoH+l+AjwaiiwTkFg/aNe5zw/sP4zFFgnvoOlLwlIOFjqndjKC6xHsf7pEKypWAcGK8orwEqVEiTCq1HBsvXqn8nA6nUu5AfWf4YCK4wrvEqWICTEq0HBMvbqJtbtV+p1LuUH1n8G8gqwOmiZkBivhgPLnKp3sd5/oV7nYn5g/WcgsMLEUq+z4xYNCfLqx5G8Ovl5NUm90MXcvHonS73EjPBKXpRN44N1OjRYJ0ewXtFSrzAjvJIHWNkFedUpWCdfsL6nXmBGcKUPsHI7OFgnvAoQS73A/usELPU2ZHR0sE6uXgEWXmUFWLkdHqwTYOGVvi7AUm9CTof36iyWm1djgOUslnp1QwRYmQHW6XR0r3zBUi9ujHoAS70HWQEWYAGWPsDKDLAAC7A6CK/yAqyTo1jqheWGV/JkYP04lFeAde7oXgFWB4nBUi8/N7w6d3SwHMVSr2yYZGD9OJJXQWKpF7nV0b1yFEu9sHFSgqVee0GAde7wYJ2czFIvaqBkYP04kleAdQmvTj5iqdc0UjKw1AsvCq/OAdYlwFIGWDkB1iW8uoRXygArI8C6BFiXAEsZYOWEV5fw6hJgKQOsjADrEl5dwitleJURXl3Cq2uApQywMgKsS4B1Ca+kAdZ2eHUJr64BljLAygiwLgHWNbxSBlgZwdU5wLoGWMoAKyO8ugRY1wBLGWBlhFeX8OqajVe/qJcxZoCVE16dA6zXTLwCrKoAKyfAugRY10y8Aqy6ACsnvDqHV69ZeIVYlQFWTnh1DrBeAyxhgJUTYJ0A661fDLxCrMoAKye8OgdY175r0+4VYtUGWBnh1Qmwbl24afYKsCrDq4wA6xxenXv1ptUrxKoNsDLCqxNgXfulQaxfAMsgwNoOsM4B1ukdrBqyHsD6pF7JqAFWRnh1Aqzvffr0S71YvwCWRYCVE2CdWsX69k09//YewCoU65cJWIhVGWBlhFenRrC+7QCsT49gFYn1ywwsxKoLsHLCq3awRhfr0xSsErEAy6qPeJURYJ2axPq2S7DyxZp6dQYLsar6+DGGLPU6G8OrFrC+7QCsTw1gzbwCrOo+BomlXmdjgNUA1rdvOxBrCaw8seZcXcFCrJo+xoilXmZzeFUv1htYX76ol1Ddp0WwcsRKeoVYNQFWZoBVC9bNq29fHlKvpqhXYcrFWuLqDSzEKu9jjFjqZbb3d4NVf59TL8CgNq8mYI3k1qckWOtiLXMFWA19jBFLvczGnp+fv5PT5NXwYF14afFqGayrWX279WkFrBWyUly9g4VYxX0MEUu9ytauYFWS9fcOwHrDpcGrJFi9v9P6tA5WQqw0V7/cDahe23B9DBFLvcrGnm9g1Yj19/Bg3bvS4NU6WB2btQXWglkrWj2CBVmFfQwRS73Kxt7BKjfr7XXPz+pl1DVRpd6rTbA6NetTBlhlfUKs6j5GiKVeZGsPYJWJ9fcdWCOKNRXluzvVYOWI1Z9Zj7rYewVZRX2MEEu9yMaeH8EqIOv+Rc8DijXn5AJPpVd5YHVG1qcAsDArv48BYqnX2NpzrVh/jw3WEiav9FR5lQtWT2RNYfECC7My+xgglnqNjT3PwcoT6+8ZWCOJtUzJtwKxZmDli9ULW
XNW3LzCrKw++oulXmJrS2DliPX3wGAlIfmWLdacqxKwuhBryRRXsDBrs4/uYqlX2NrzIlibZE2/fCSx0ozc4VPh1benArH0ZC2C4uwVaG00BcucLPUCm0uBtSrWnKtXsPoXaxWRb7liLXo1klhJTQLAwqx0c7BsxVKvr7nnJFhps5a+9AZW32JtEPL0LVOshFdlYAnJWrMkwivQSrUAlqVY6uW1twrWIlnLXzjCW6xNQB7BSoqV4qoYLBFZ646EgQVaCy2BZSiWennNPW+ANTEr+UUDiJXBx9MUoDKuxhBrE5FIsEBr0iJYZmKpV9fc83OGWNl1DFYWHk8zsGZkrWpVA1YwWTmABHsFWvctg2VD1kf14pp79gCrQ7Hy5Hh6WhKrrFPPYuXqIQALta6lvDIg6/sQ6tW19vxsK1anYGW68WQDVoVYIWYVwCHyCrVWwWoj6zKAenWNPVuD1aVYuWY82YBVKZYzWYVoCL06OFqrYFWTdXu5enVtTb2yA6sfs/LBeLIC61TzqdAVrXIwxF4d2KwNsKrIen+xenUtzbgy/EzYi1kFVjwZgXUWq/I9lo9adVzIvbpkvBUjtAlWOVl3L1WvrqElr4zBEpNVxoQlWPVvsYzRarCiA64uGe3EMGWAVUbWwwvVq6tukSsDsWYDqhZYKsSTKVjtYhmg1QZFJ159OhpZWWAVkLUHr1JaGYg1H1CywmIcnozBUotl4EQvXp0zeSjGKBOsTLImr1EvrqY1rTzAiierxoYuwaoly0iJfrw6Z/d89F02WBlkzV6hXlxxG1q1i7U8ZuQSq2B4MgdLJpahER1xdc70Mem2ArC2zJp/uXpxhWVw5QNWnFmVLjiAVTuVJrFshejLq4OIVQbWCllLX6xeXElZWjWLtTJuwBprVXjyACteLHMhOgPrGGSVirUAV/KL1GvLL5srP7Cc0frSAETPYGWL5QFEZ159OgJZlWDlpF5afgVeNYmVMbrPAptIeDIFSyOWDw/9gbVXsT58uP0TYJVx1SRW7hVs19cIwpMXWHFiufHQnVc7FevDhzexjg5WqVZNYhVcw2p97R58V+o0NliOPPQH1g7F+nDt9RdHBqtGqxaxyq7Svj4LDs5enU4nD7BixHL2oTuw9iXWh7uuvz6uV7VcVYtVfJmm5Rlp8ArWNQOvHMBaFSuCiK682pFYHx67/uZBwWrQqlqsmgvVrs/KAlewAsQKdKIbsPYh1odZr//igGA1alUrVt2VKtZn5cCX67fc3we2ACv0LVY0FX2AtQOx5lwdFSwDrCrFqr5U6RKtGDg3BlgpsQRWAJZBS1y5/8kG5YJTWWl1LsyrQrHMELj0CJaFVy6fCZfFkmDxSe/V2GIta3UHlpNYwiUnMrPqWphXzyVkmRFw6WlksERYfAKshpJcvXvlA9b9BXrIBqmH4rx6zibLTIBr3mB5iiWy4tODWKIZ1J8TaWmtHsByEasrrwx0WirQq+c8sszO/7WnccCaiSWi4prYqzHFWuXK/S2Wbt3TLLBIFMjVc45Ydsf/2sQrB7DcxJJRcU3s1XhgbWnl/hZreTLBu3By5eo5Uyy7y22s1e7wX5t6NRBYOipeU2p1zuLsxJXBVSBYycu6Z0dFskCuzq2t1u7sX5ty5QKWk1haLTrI5PwElcXVoxyeYG1c2i9bKlKFerUmlt3Jf23mVedg3Yml5kKf1SHyL5OriRp+XqUm5L4R1lSkCvXqOUmW4cG/Nveqd7DexFJr0UOWR8mxbK7kYDmT5SBFomCvEmJZnvtrc69sxHKc+G1MNRY9ZHmYvCrQyluszGm5bYWLFImCvVoEy/LYv+YF1mRc0zlfh1Rb0UfmZ8q6Qq6mWkjAcjLLB4pksV4tiWV66l9bAMvk/yWcfNS0nfRlSDUVfeRxsAwr5mpGhQgsB7K8oEgV7NVcLNtD/9oSWBZiTf7jo+2kzyOqpegk82NlWQVXcyi8wMqYnelm+EGRKtirqVi2Z/6WE1jTP9xlPGu8esv0UJlWxdWCEjqwLMnyhCJVsFePYhkf+VtzsEz+iuTp/+5jPW3AumV3pGyr5GoRCR1YZmT5QpEq2Kt7sKxP/K0ZWCZ/qfuTM1g//6x2opuMDpRt9Vr5ilU3SYMNcYYiXahX92IZn/j3PMB6+/E7TtP/GbDeMzhOxrVx5SlW7TRbd8QdinSRXD2/i2V74O+bgGXyY3MAK7DWw2Rcs1bLPmjBajUrgopUoV7dwLI97489bq3FD/p6cgYLr+5rOknWWXCV0EEMVgtZIVSsFOfVTSzT8z7tfmstfjLh/c+Qvo1rOmHAeqj+IFlnxFXKBluwzKa1WRAVfXRZselxn3W/twY//PkJsGKrPEbWmWm1QoMYrDqy1IQEd17yi+l5n3b/A3Oe2sVyB+tnwHqs5hRZZ8pVGgZDsCqnXLwzakCiezlned5nvbPyYI2FV15gqYnoq+IzZJ6tVmsuyMEqJksNSHTuYD0lwKoUC7DCKzxB1lk6lcGCmVgOc1tI7Ud8cWA9PbWL9eQOFp8IZ5UcIOvsiMpWQQ7WeW65aqn5iM/5LdadK35gWU4YsGYVK2OWGU/LKiSSg/V9cpnvs9R6CNKBVSNWCFhqIHqrhhqLjGRKmZBOD9bGBG+p8VAkBKtcrOkAr/fNcsKANa+Km+YMUKoFwQKsCLHUeEgSglUsFmBJqvKmMQORGkCoBet+TNcJXlLToUkJVqFYAV4B1kLV6lTXftgbPegCrC2x1HRocgXrHpYlsMrE4g2WqGp4KjM4640e9AHWhlhqOkRJwSoRa/ba27iGEwaspVrwKc/ipDdz0A6Wu1hqOFQ5gjWBpQ2s+Wtv4xrOGLCWauGnOItz3s4BYHWbFqx8sRJgPRmDpcahy9oIKsrimFtwUOWVOVhrU1TDocrvm1jTd0JNYi288m1YuykD1mJtBpVkcsotPCj3ajqg8wTVbugKAqtRrBCw+ES4XLNDuZkcchsQqrwyBys9QTUbutRgZYm19Lq3Uc1mDFiJ2inKyuaM24BQ5VWgWGo2dHmB9QiLE1jXf7KaMl4lMuFoK5sDbuRBIVhL47nOUK2GsCiwWsRafNn7mFZTBqxENiKtZ3S+bTw4lYm1OJzrDNVqKJODtS1WyivACsrGpLWsjreNB6cisJaHc52gGg1lTm+xCsDaEiv5Bus2ps2M8SqZFUvprI63jQfnegFrcYZqNJT5gDWl5VQv1vJL7oe0mTJgJbNiKZnZ6TYC4VyNVx6fCRcmqDZDW+dgJV5yP6TNlAErmZ1My5mdbSMQrlV4BVgBubzFKgNrTawMsEzEwquV7GxayuxoW4FwrcIrF7DmE1SToa0HsNJirXkFWFHZ4bSU3dE2AuG1cq8AK6IewEqJlfryyZAGMwaslexwWsjuZJuJ8FonYM3mpxZDnANYc1u2wFoWK/nVkyHbZ4xXaxnyNMvwYNeWmlqxV05gTSeoFkOcw2fCCrAWxcr0ykAswFrNUqjHLM91danJlXoFWCH5gtUgVvJLZwM2zxiwVrMk6iHLY93U8vQ6Aetxdmov5JmDtaTLNlgzsTa8
c9aniIPXDzGzE0tinMtt1odXp3L3S01Kp61PEUeuHiMmZtaFOdabrU6vDqXu1tqVDxreowccDEbErCmNd1qdXj1A3+q4VzTU+SAi9mQgDWt6VargyveYF1qeooccDEbErCmNd1qcT/8gFeAda7pMXLAxWxIwJrWdKvF/VAnlpoY23I3S22Ka23PkT0uViMC1qy2Wy3temAP7hVgnWt7kOxtMRrxK2BNa7vT2m5H9sBa/QBYl9oeJHtcrEYErGltd1rb+6E9Llf8ENVLjU+SPS5GI1Z4BVi9dn9sj8oVYF1rfJTsdTEaEbCmNd5pZY8H94hYncveLrUprjU+Sva6GI0IWNMa77Sy6dE9oFY/ANa1xkfJXhejEQFrWuOdFrZ4fI+F1bns/VKb4lrrw2Sui82AgDWr9U7rUkvRR/n7pTbFtcZnyV4XoxEBa1LjjVampqKPCjZMjYpnjc+SvS5GIwLWpMYbrUxNRR8VbJgaFc8anyV7XYxGBKxJjTdamFqKTirYMTUqnjU+TPa62I+YndoU1xpvtDC1FJ1UsGNqVDxrfJjsebEfMTu1Ka413mhhaik6qWTL1Ko41vgw2fNiP2J2alNca7zRwtRS9FHRlqlVcaz1aTLnxWZAwJrWeqNlqaXopKI9U6viWOvjZM6LzYCANa31RstSS9FJRXumVsWx5ufJmheT8QBrVvONVqWWopPKNk3Nil/Nz5M1LybjVXkFWF2mlqKTyjZNzYpb7c+TtS8m4wHWrPY7rUkNRS+V7ZraFbcMnihjXyyGA6x5BndakhqKTircNbUrbhk8Uca+WAwHWLMMbrQmtRSdVLptali8MniirH2xGA+wphncaE1qKTqpdNvUsHhl8ERZ+2IxHmBNM7jRmtRSdFLptqlh8crgibL2xWI8wJpmcKM1qaXopNJtU8PilcETZe2LxXiANc3gRktSQ9FLxRunlsUpg0fK2heL8QBrmsGNlqSGopPKN04ti1MWz5SxLwbDAdYsixutSC1FJ1XsnJoWnyyeKWNfDIYDrFkWN1qRWopOqtg5NS0+WTxT1ry0jwdYswzusyS1FJ1Us3VqW1yyeKaseWkfD7BmGdxnSWopOqlm69S2uGTxTFnz0j4eYM0yuM+S1FJ0Us3WqW1xyeKZsualfTzAmmVwnxWpoeilqs1T4+KRxUNlzUv7eIA1y+A+K1JD0UtVm6fGxSOLh8qal/bxAGuWwX1WpIaikyp3T62LQxYPlTUv7eMB1jSD2yxJLUUnVe6eWheHTJ4qa15EXgFWf6ml6KTK3VPr4pDJUwVY3dd8j0Wppeik2u1T82KfyVMFWN3XfI9FqaXopNrtU/Nin8lTBVjd13yPRaml6KTq/VP7Yp7JU2XuC2BZ13qLVaml6KTq/VP7Yp7NY2XNC2BZ13iDZaml6KOGDVQDY53NY9UbWJXLUKviWNv91aWmoo8aNlANjHU2jxVg9V7b/dWlpqKPGjZQDYx1No+VuS+AZVzT7RWmpqKPWnZQLYxxNo8VYPVe0+0Vpqaij1p2MASRgIu8X8wiwOq8prsrTE1FHzVtYYQh/td4v5ZJfYFVuwq1Ko613Fxlaiq6qG0LQxDxv8j7tSwyB0YC1o7Fqr+12tRWdFHbFsYg4n+V92sZBFidV39rtamt6KLGPQwxxPsiDxdrz9wXwLKtekvEqa3oosY9DDHE+yIPF2tvJ2DtV6z6LRGnxqKDmvcwxBDnizxerLmuwGpYh9oVtxr2RJtaiw5q3sMQQ5wv8nix9nYC1m7FatkTaWotOqh9EyMM8b3G5GLtmQMjAmunYjVtiTS1Fh3UvokhhLheZHa11sx9UYG1T7HatkSZWosOMtjFCEI8rzG/WmuA1XVtW6JMrUUHGexiCCGeF5lfrbH9gLVLsRq3RJhaiw4y2MUQQRwvsnS5tnYE1h7Fat0SXWot9JlsYwQgbtdIXK8pe190YO2QrPYtUaXmQp/JNkb44XWN5AVbsudFCdbuyLLYEk1qLvSZbGMEH07XWLliQ3sDa19k2eyIJDUX+mz2MUAPl0usX7K6el0cwEKsaTYbIknNhTyjfQzQw+US65eszkMXNVg7EstoQySpwVBntI0BerhcYv2S1QFW1xltiCQ1GOqMttEfD48rbF2zOg9c5GDtRyyrDVGkBkOd0Tb64+Fxha1rVtcXWLzFmmS0H5LUYKgz2kZ/PDyusHXN6gCr64z2Q5IaDHFW2+iPh8cVNi9amYstgGWW0X5IUoshzmob/e3wuMLmRSvrDSz+YMNjNtuhSS2GOKtt9KfD4QoZV63Lg5YWrwDrMZvt0KQWQ5vdPrrTYX+BnKvWBVhdZ7MbotRkaLPbR3867K+Qc9WqAKvrbHZDlJoMbXb76E+H/RVyrlpVd2DZiKWGxiqTzVClJkOa4T76y2F+hbzL1rRPsNTOmGWxGbLUZkgz3McAOcwvkXfZ8lxkASyzLDZDltoMaYb7GCCH+SXyLlseYPWdxWbIUpuhzHIfA+Qwv0TeZcsDrL6z2AxdajWEWW5jABzWl8i9bnEesrR5BVj3WWyGLrUawiy3MQIO62vkXrcwF1kawbIQS+2MWQZ7IUythjDTfQyAw/gS2dctDLD6zmArlKnV0GW7jwFwGF8i+7qFAVbfGWyFMjUbumz3McAN20sUXLgswOo7g61QpmZDl+0+Rrhhe42CC5fkAwtgmWWwFdLUbsiy3cYIN2yvUXDhknxkASyrDHZCm9oNVcbbGOGG7TUKLlxSn2Dx859vtW+EODUcqoy3MYIN02sUXTk/J1gAy6r2jRCnhkOV9T4GsGF5ibIr5wdYnde+EeLUcKiy3scINiyvUXbl7JxkASyj2vdBnloOUdbbGMGG5TWKLpzffsHah1gG+2DS/wOD1EtKBACiBQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Image.open('./mhp_extension/demo/demo_multiple_human_parsing.png')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + 
"pygments_lexer": "ipython3", + "version": "3.7.6" + }, + "pycharm": { + "stem_cell": { + "cell_type": "raw", + "metadata": { + "collapsed": false + }, + "source": [ + "## COCO style annotation transfer" + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo.jpg b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo.jpg new file mode 100644 index 0000000000000000000000000000000000000000..870817943ddd2e0c23c26d4620ff51ea2c9d5ebd Binary files /dev/null and b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo.jpg differ diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_global_human_parsing.png b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_global_human_parsing.png new file mode 100644 index 0000000000000000000000000000000000000000..afc5b8fee781e0cf5c505b0e99a86849742d9ef0 Binary files /dev/null and b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_global_human_parsing.png differ diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_instance_human_mask.png b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_instance_human_mask.png new file mode 100644 index 0000000000000000000000000000000000000000..9cd5b1b2223d2bd302e347806a42e6aa09c2c5b7 Binary files /dev/null and b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_instance_human_mask.png differ diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_multiple_human_parsing.png b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_multiple_human_parsing.png new file mode 100644 index 0000000000000000000000000000000000000000..28875d8dc700464b3841cfb79d7e10428684d69a Binary files /dev/null and b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/demo/demo_multiple_human_parsing.png differ diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.circleci/config.yml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.circleci/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..6c605889cf4ac01d3ed63f62d65a0d6ae1f6edd0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.circleci/config.yml @@ -0,0 +1,179 @@ +# Python CircleCI 2.0 configuration file +# +# Check https://circleci.com/docs/2.0/language-python/ for more details +# +version: 2 + +# ------------------------------------------------------------------------------------- +# Environments to run the jobs in +# ------------------------------------------------------------------------------------- +cpu: &cpu + docker: + - image: circleci/python:3.6.8-stretch + resource_class: medium + +gpu: &gpu + machine: + image: ubuntu-1604:201903-01 + docker_layer_caching: true + resource_class: gpu.small + +# ------------------------------------------------------------------------------------- +# Re-usable commands +# ------------------------------------------------------------------------------------- +install_python: &install_python + - run: + name: Install Python + working_directory: ~/ + command: | + pyenv install 3.6.1 + pyenv global 3.6.1 + +setup_venv: &setup_venv + - run: + name: Setup Virtual Env + working_directory: ~/ + command: | + python -m venv ~/venv + echo ". ~/venv/bin/activate" >> $BASH_ENV + . 
~/venv/bin/activate + python --version + which python + which pip + pip install --upgrade pip + +install_dep: &install_dep + - run: + name: Install Dependencies + command: | + pip install --progress-bar off -U 'git+https://github.com/facebookresearch/fvcore' + pip install --progress-bar off cython opencv-python + pip install --progress-bar off 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI' + pip install --progress-bar off torch torchvision + +install_detectron2: &install_detectron2 + - run: + name: Install Detectron2 + command: | + gcc --version + pip install -U --progress-bar off -e .[dev] + python -m detectron2.utils.collect_env + +install_nvidia_driver: &install_nvidia_driver + - run: + name: Install nvidia driver + working_directory: ~/ + command: | + wget -q 'https://s3.amazonaws.com/ossci-linux/nvidia_driver/NVIDIA-Linux-x86_64-430.40.run' + sudo /bin/bash ./NVIDIA-Linux-x86_64-430.40.run -s --no-drm + nvidia-smi + +run_unittests: &run_unittests + - run: + name: Run Unit Tests + command: | + python -m unittest discover -v -s tests + +# ------------------------------------------------------------------------------------- +# Jobs to run +# ------------------------------------------------------------------------------------- +jobs: + cpu_tests: + <<: *cpu + + working_directory: ~/detectron2 + + steps: + - checkout + - <<: *setup_venv + + # Cache the venv directory that contains dependencies + - restore_cache: + keys: + - cache-key-{{ .Branch }}-ID-20200425 + + - <<: *install_dep + + - save_cache: + paths: + - ~/venv + key: cache-key-{{ .Branch }}-ID-20200425 + + - <<: *install_detectron2 + + - run: + name: isort + command: | + isort -c -sp . + - run: + name: black + command: | + black --check -l 100 . + - run: + name: flake8 + command: | + flake8 . + + - <<: *run_unittests + + gpu_tests: + <<: *gpu + + working_directory: ~/detectron2 + + steps: + - checkout + - <<: *install_nvidia_driver + + - run: + name: Install nvidia-docker + working_directory: ~/ + command: | + curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | sudo apt-key add - + distribution=$(. /etc/os-release;echo $ID$VERSION_ID) + curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | \ + sudo tee /etc/apt/sources.list.d/nvidia-docker.list + sudo apt-get update && sudo apt-get install -y nvidia-docker2 + # reload the docker daemon configuration + sudo pkill -SIGHUP dockerd + + - run: + name: Launch docker + working_directory: ~/detectron2/docker + command: | + nvidia-docker build -t detectron2:v0 -f Dockerfile-circleci . 
+ nvidia-docker run -itd --name d2 detectron2:v0 + docker exec -it d2 nvidia-smi + + - run: + name: Build Detectron2 + command: | + docker exec -it d2 pip install 'git+https://github.com/facebookresearch/fvcore' + docker cp ~/detectron2 d2:/detectron2 + # This will build d2 for the target GPU arch only + docker exec -it d2 pip install -e /detectron2 + docker exec -it d2 python3 -m detectron2.utils.collect_env + docker exec -it d2 python3 -c 'import torch; assert(torch.cuda.is_available())' + + - run: + name: Run Unit Tests + command: | + docker exec -e CIRCLECI=true -it d2 python3 -m unittest discover -v -s /detectron2/tests + +workflows: + version: 2 + regular_test: + jobs: + - cpu_tests + - gpu_tests + + #nightly_test: + #jobs: + #- gpu_tests + #triggers: + #- schedule: + #cron: "0 0 * * *" + #filters: + #branches: + #only: + #- master diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.clang-format b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.clang-format new file mode 100644 index 0000000000000000000000000000000000000000..a757d4fff0c2f065d7d51719b52aef35ec48d04e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.clang-format @@ -0,0 +1,85 @@ +AccessModifierOffset: -1 +AlignAfterOpenBracket: AlwaysBreak +AlignConsecutiveAssignments: false +AlignConsecutiveDeclarations: false +AlignEscapedNewlinesLeft: true +AlignOperands: false +AlignTrailingComments: false +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortBlocksOnASingleLine: false +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: Empty +AllowShortIfStatementsOnASingleLine: false +AllowShortLoopsOnASingleLine: false +AlwaysBreakAfterReturnType: None +AlwaysBreakBeforeMultilineStrings: true +AlwaysBreakTemplateDeclarations: true +BinPackArguments: false +BinPackParameters: false +BraceWrapping: + AfterClass: false + AfterControlStatement: false + AfterEnum: false + AfterFunction: false + AfterNamespace: false + AfterObjCDeclaration: false + AfterStruct: false + AfterUnion: false + BeforeCatch: false + BeforeElse: false + IndentBraces: false +BreakBeforeBinaryOperators: None +BreakBeforeBraces: Attach +BreakBeforeTernaryOperators: true +BreakConstructorInitializersBeforeComma: false +BreakAfterJavaFieldAnnotations: false +BreakStringLiterals: false +ColumnLimit: 80 +CommentPragmas: '^ IWYU pragma:' +ConstructorInitializerAllOnOneLineOrOnePerLine: true +ConstructorInitializerIndentWidth: 4 +ContinuationIndentWidth: 4 +Cpp11BracedListStyle: true +DerivePointerAlignment: false +DisableFormat: false +ForEachMacros: [ FOR_EACH, FOR_EACH_ENUMERATE, FOR_EACH_KV, FOR_EACH_R, FOR_EACH_RANGE, ] +IncludeCategories: + - Regex: '^<.*\.h(pp)?>' + Priority: 1 + - Regex: '^<.*' + Priority: 2 + - Regex: '.*' + Priority: 3 +IndentCaseLabels: true +IndentWidth: 2 +IndentWrappedFunctionNames: false +KeepEmptyLinesAtTheStartOfBlocks: false +MacroBlockBegin: '' +MacroBlockEnd: '' +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCBlockIndentWidth: 2 +ObjCSpaceAfterProperty: false +ObjCSpaceBeforeProtocolList: false +PenaltyBreakBeforeFirstCallParameter: 1 +PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakString: 1000 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +PointerAlignment: Left +ReflowComments: true +SortIncludes: true +SpaceAfterCStyleCast: false +SpaceBeforeAssignmentOperators: true +SpaceBeforeParens: ControlStatements +SpaceInEmptyParentheses: false 
+SpacesBeforeTrailingComments: 1 +SpacesInAngles: false +SpacesInContainerLiterals: true +SpacesInCStyleCastParentheses: false +SpacesInParentheses: false +SpacesInSquareBrackets: false +Standard: Cpp11 +TabWidth: 8 +UseTab: Never diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.flake8 b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.flake8 new file mode 100644 index 0000000000000000000000000000000000000000..0cc61b77a7e7005b3499394c36288dc8f3bcad39 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.flake8 @@ -0,0 +1,9 @@ +# This is an example .flake8 config, used when developing *Black* itself. +# Keep in sync with setup.cfg which is used for source packages. + +[flake8] +ignore = W503, E203, E221, C901, C408, E741 +max-line-length = 100 +max-complexity = 18 +select = B,C,E,F,W,T4,B9 +exclude = build,__init__.py diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/CODE_OF_CONDUCT.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000000000000000000000000000000..0f7ad8bfc173eac554f0b6ef7c684861e8014bbe --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/CODE_OF_CONDUCT.md @@ -0,0 +1,5 @@ +# Code of Conduct + +Facebook has adopted a Code of Conduct that we expect project participants to adhere to. +Please read the [full text](https://code.fb.com/codeofconduct/) +so that you can understand what actions will and will not be tolerated. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/CONTRIBUTING.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..81936dfedb495dd5cd21da2bfcf9819b97ed1dff --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/CONTRIBUTING.md @@ -0,0 +1,49 @@ +# Contributing to detectron2 + +## Issues +We use GitHub issues to track public bugs and questions. +Please make sure to follow one of the +[issue templates](https://github.com/facebookresearch/detectron2/issues/new/choose) +when reporting any issues. + +Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe +disclosure of security bugs. In those cases, please go through the process +outlined on that page and do not file a public issue. + +## Pull Requests +We actively welcome your pull requests. + +However, if you're adding any significant features (e.g. > 50 lines), please +make sure to have a corresponding issue to discuss your motivation and proposals, +before sending a PR. We do not always accept new features, and we take the following +factors into consideration: + +1. Whether the same feature can be achieved without modifying detectron2. +Detectron2 is designed so that you can implement many extensions from the outside, e.g. +those in [projects](https://github.com/facebookresearch/detectron2/tree/master/projects). +If some part is not as extensible, you can also bring up the issue to make it more extensible. +2. Whether the feature is potentially useful to a large audience, or only to a small portion of users. +3. Whether the proposed solution has a good design / interface. +4. Whether the proposed solution adds extra mental/practical overhead to users who don't + need such feature. +5. Whether the proposed solution breaks existing APIs. 
+ +When sending a PR, please do: + +1. If a PR contains multiple orthogonal changes, split it to several PRs. +2. If you've added code that should be tested, add tests. +3. For PRs that need experiments (e.g. adding a new model or new methods), + you don't need to update model zoo, but do provide experiment results in the description of the PR. +4. If APIs are changed, update the documentation. +5. Make sure your code lints with `./dev/linter.sh`. + + +## Contributor License Agreement ("CLA") +In order to accept your pull request, we need you to submit a CLA. You only need +to do this once to work on any of Facebook's open source projects. + +Complete your CLA here: + +## License +By contributing to detectron2, you agree that your contributions will be licensed +under the LICENSE file in the root directory of this source tree. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/Detectron2-Logo-Horz.svg b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/Detectron2-Logo-Horz.svg new file mode 100644 index 0000000000000000000000000000000000000000..eb2d643ddd940cd8bdb5eaad093029969ff2364c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/Detectron2-Logo-Horz.svg @@ -0,0 +1 @@ +Detectron2-Logo-Horz \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000000000000000000000000000000000..5e8aaa2d3722e7e73a3d94b2b7dfc4f751d7a240 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,5 @@ + +Please select an issue template from +https://github.com/facebookresearch/detectron2/issues/new/choose . + +Otherwise your issue will be closed. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/bugs.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/bugs.md new file mode 100644 index 0000000000000000000000000000000000000000..52d299886a457480d27c54a27734a704786a1d28 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/bugs.md @@ -0,0 +1,36 @@ +--- +name: "🐛 Bugs" +about: Report bugs in detectron2 +title: Please read & provide the following + +--- + +## Instructions To Reproduce the 🐛 Bug: + +1. what changes you made (`git diff`) or what code you wrote +``` + +``` +2. what exact command you run: +3. what you observed (including __full logs__): +``` + +``` +4. please simplify the steps as much as possible so they do not require additional resources to + run, such as a private dataset. + +## Expected behavior: + +If there are no obvious error in "what you observed" provided above, +please tell us the expected behavior. 
+ +## Environment: + +Provide your environment information using the following command: +``` +wget -nc -q https://github.com/facebookresearch/detectron2/raw/master/detectron2/utils/collect_env.py && python collect_env.py +``` + +If your issue looks like an installation issue / environment issue, +please first try to solve it yourself with the instructions in +https://detectron2.readthedocs.io/tutorials/install.html#common-installation-issues diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/config.yml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..c19e2490a71893c516b2bd54b887399493fadcd4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,9 @@ +# require an issue template to be chosen +blank_issues_enabled: false + +# Unexpected behaviors & bugs are split to two templates. +# When they are one template, users think "it's not a bug" and don't choose the template. +# +# But the file name is still "unexpected-problems-bugs.md" so that old references +# to this issue template still works. +# It's ok since this template should be a superset of "bugs.md" (unexpected behaviors is a superset of bugs) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/feature-request.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/feature-request.md new file mode 100644 index 0000000000000000000000000000000000000000..dd69a33478c85068cdd7b8b90161f97cc55c1621 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/feature-request.md @@ -0,0 +1,31 @@ +--- +name: "\U0001F680Feature Request" +about: Submit a proposal/request for a new detectron2 feature + +--- + +## 🚀 Feature +A clear and concise description of the feature proposal. + + +## Motivation & Examples + +Tell us why the feature is useful. + +Describe what the feature would look like, if it is implemented. +Best demonstrated using **code examples** in addition to words. + +## Note + +We only consider adding new features if they are relevant to many users. + +If you request implementation of research papers -- +we only consider papers that have enough significance and prevalance in the object detection field. + +We do not take requests for most projects in the `projects/` directory, +because they are research code release that is mainly for other researchers to reproduce results. + +Instead of adding features inside detectron2, +you can implement many features by [extending detectron2](https://detectron2.readthedocs.io/tutorials/extend.html). +The [projects/](https://github.com/facebookresearch/detectron2/tree/master/projects/) directory contains many of such examples. + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/questions-help-support.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/questions-help-support.md new file mode 100644 index 0000000000000000000000000000000000000000..081156136b709b1e0ec4d27404b9cb8fa9ba1d27 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/questions-help-support.md @@ -0,0 +1,26 @@ +--- +name: "❓How to do something?" +about: How to do something using detectron2? What does an API do? 
+ +--- + +## ❓ How to do something using detectron2 + +Describe what you want to do, including: +1. what inputs you will provide, if any: +2. what outputs you are expecting: + +## ❓ What does an API do and how to use it? +Please link to which API or documentation you're asking about from +https://detectron2.readthedocs.io/ + + +NOTE: + +1. Only general answers are provided. + If you want to ask about "why X did not work", please use the + [Unexpected behaviors](https://github.com/facebookresearch/detectron2/issues/new/choose) issue template. + +2. About how to implement new models / new dataloader / new training logic, etc., check documentation first. + +3. We do not answer general machine learning / computer vision questions that are not specific to detectron2, such as how a model works, how to improve your training/make it converge, or what algorithm/methods can be used to achieve X. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/unexpected-problems-bugs.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/unexpected-problems-bugs.md new file mode 100644 index 0000000000000000000000000000000000000000..bafee7a1a3897903d26e68001d3d3d2b7686015b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/ISSUE_TEMPLATE/unexpected-problems-bugs.md @@ -0,0 +1,45 @@ +--- +name: "Unexpected behaviors" +about: Run into unexpected behaviors when using detectron2 +title: Please read & provide the following + +--- + +If you do not know the root cause of the problem, and wish someone to help you, please +post according to this template: + +## Instructions To Reproduce the Issue: + +1. what changes you made (`git diff`) or what code you wrote +``` + +``` +2. what exact command you run: +3. what you observed (including __full logs__): +``` + +``` +4. please simplify the steps as much as possible so they do not require additional resources to + run, such as a private dataset. + +## Expected behavior: + +If there are no obvious error in "what you observed" provided above, +please tell us the expected behavior. + +If you expect the model to converge / work better, note that we do not give suggestions +on how to train a new model. +Only in one of the two conditions we will help with it: +(1) You're unable to reproduce the results in detectron2 model zoo. +(2) It indicates a detectron2 bug. + +## Environment: + +Provide your environment information using the following command: +``` +wget -nc -q https://github.com/facebookresearch/detectron2/raw/master/detectron2/utils/collect_env.py && python collect_env.py +``` + +If your issue looks like an installation issue / environment issue, +please first try to solve it yourself with the instructions in +https://detectron2.readthedocs.io/tutorials/install.html#common-installation-issues diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/pull_request_template.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/pull_request_template.md new file mode 100644 index 0000000000000000000000000000000000000000..4ff5ea51776ff27b3e794e366a92a455e2f06a01 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.github/pull_request_template.md @@ -0,0 +1,9 @@ +Thanks for your contribution! + +If you're sending a large PR (e.g., >50 lines), +please open an issue first about the feature / bug, and indicate how you want to contribute. 
+ +Before submitting a PR, please run `dev/linter.sh` to lint the code. + +See https://detectron2.readthedocs.io/notes/contributing.html#pull-requests +about how we handle PRs. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.gitignore b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..e85df4cf713e2c4a6fc02885f2b2ff3d0f104763 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/.gitignore @@ -0,0 +1,46 @@ +# output dir +output +instant_test_output +inference_test_output + + +*.jpg +*.png +*.txt +*.json +*.diff + +# compilation and distribution +__pycache__ +_ext +*.pyc +*.so +detectron2.egg-info/ +build/ +dist/ +wheels/ + +# pytorch/python/numpy formats +*.pth +*.pkl +*.npy + +# ipython/jupyter notebooks +*.ipynb +**/.ipynb_checkpoints/ + +# Editor temporaries +*.swn +*.swo +*.swp +*~ + +# editor settings +.idea +.vscode + +# project dirs +/detectron2/model_zoo/configs +/datasets +/projects/*/datasets +/models diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/GETTING_STARTED.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/GETTING_STARTED.md new file mode 100644 index 0000000000000000000000000000000000000000..acaf13f02c906b45ffc2f49ee5a0ce01d82b4786 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/GETTING_STARTED.md @@ -0,0 +1,79 @@ +## Getting Started with Detectron2 + +This document provides a brief intro of the usage of builtin command-line tools in detectron2. + +For a tutorial that involves actual coding with the API, +see our [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5) +which covers how to run inference with an +existing model, and how to train a builtin model on a custom dataset. + +For more advanced tutorials, refer to our [documentation](https://detectron2.readthedocs.io/tutorials/extend.html). + + +### Inference Demo with Pre-trained Models + +1. Pick a model and its config file from + [model zoo](MODEL_ZOO.md), + for example, `mask_rcnn_R_50_FPN_3x.yaml`. +2. We provide `demo.py` that is able to run builtin standard models. Run it with: +``` +cd demo/ +python demo.py --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml \ + --input input1.jpg input2.jpg \ + [--other-options] + --opts MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl +``` +The configs are made for training, therefore we need to specify `MODEL.WEIGHTS` to a model from model zoo for evaluation. +This command will run the inference and show visualizations in an OpenCV window. + +For details of the command line arguments, see `demo.py -h` or look at its source code +to understand its behavior. Some common arguments are: +* To run __on your webcam__, replace `--input files` with `--webcam`. +* To run __on a video__, replace `--input files` with `--video-input video.mp4`. +* To run __on cpu__, add `MODEL.DEVICE cpu` after `--opts`. +* To save outputs to a directory (for images) or a file (for webcam or video), use `--output`. + + +### Training & Evaluation in Command Line + +We provide a script in "tools/{,plain_}train_net.py", that is made to train +all the configs provided in detectron2. +You may want to use it as a reference to write your own training script. 
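+
+As a rough illustration of such a custom script, a minimal sketch built on detectron2's `DefaultTrainer` might look like the following (assumptions: a dataset has already been registered under the placeholder name `my_dataset_train`, and the builtin config file below is only an example):
+
+```
+import os
+from detectron2 import model_zoo
+from detectron2.config import get_cfg
+from detectron2.engine import DefaultTrainer
+
+cfg = get_cfg()
+# start from a builtin config and its ImageNet-pretrained backbone weights
+cfg.merge_from_file(model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml"))
+cfg.DATASETS.TRAIN = ("my_dataset_train",)  # placeholder: a dataset you registered yourself
+cfg.DATASETS.TEST = ()
+cfg.MODEL.ROI_HEADS.NUM_CLASSES = 1         # placeholder: number of classes in your dataset
+cfg.SOLVER.IMS_PER_BATCH = 2
+cfg.OUTPUT_DIR = "./output"
+
+os.makedirs(cfg.OUTPUT_DIR, exist_ok=True)
+trainer = DefaultTrainer(cfg)
+trainer.resume_or_load(resume=False)
+trainer.train()
+```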
+ +To train a model with "train_net.py", first +setup the corresponding datasets following +[datasets/README.md](./datasets/README.md), +then run: +``` +cd tools/ +./train_net.py --num-gpus 8 \ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml +``` + +The configs are made for 8-GPU training. +To train on 1 GPU, you may need to [change some parameters](https://arxiv.org/abs/1706.02677), e.g.: +``` +./train_net.py \ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml \ + --num-gpus 1 SOLVER.IMS_PER_BATCH 2 SOLVER.BASE_LR 0.0025 +``` + +For most models, CPU training is not supported. + +To evaluate a model's performance, use +``` +./train_net.py \ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml \ + --eval-only MODEL.WEIGHTS /path/to/checkpoint_file +``` +For more options, see `./train_net.py -h`. + +### Use Detectron2 APIs in Your Code + +See our [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5) +to learn how to use detectron2 APIs to: +1. run inference with an existing model +2. train a builtin model on a custom dataset + +See [detectron2/projects](https://github.com/facebookresearch/detectron2/tree/master/projects) +for more ways to build your project on detectron2. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/INSTALL.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/INSTALL.md new file mode 100644 index 0000000000000000000000000000000000000000..3985f8ae4f5ecde26b310b4ab01c49b922f742e9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/INSTALL.md @@ -0,0 +1,184 @@ +## Installation + +Our [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5) +has step-by-step instructions that install detectron2. +The [Dockerfile](docker) +also installs detectron2 with a few simple commands. + +### Requirements +- Linux or macOS with Python ≥ 3.6 +- PyTorch ≥ 1.4 +- [torchvision](https://github.com/pytorch/vision/) that matches the PyTorch installation. + You can install them together at [pytorch.org](https://pytorch.org) to make sure of this. +- OpenCV, optional, needed by demo and visualization +- pycocotools: `pip install cython; pip install -U 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'` + + +### Build Detectron2 from Source + +gcc & g++ ≥ 5 are required. [ninja](https://ninja-build.org/) is recommended for faster build. +After having them, run: +``` +python -m pip install 'git+https://github.com/facebookresearch/detectron2.git' +# (add --user if you don't have permission) + +# Or, to install it from a local clone: +git clone https://github.com/facebookresearch/detectron2.git +python -m pip install -e detectron2 + +# Or if you are on macOS +# CC=clang CXX=clang++ python -m pip install -e . +``` + +To __rebuild__ detectron2 that's built from a local clone, use `rm -rf build/ **/*.so` to clean the +old build first. You often need to rebuild detectron2 after reinstalling PyTorch. + +### Install Pre-Built Detectron2 (Linux only) +``` +# for CUDA 10.1: +python -m pip install detectron2 -f https://dl.fbaipublicfiles.com/detectron2/wheels/cu101/index.html +``` +You can replace cu101 with "cu{100,92}" or "cpu". + +Note that: +1. Such installation has to be used with certain version of official PyTorch release. + See [releases](https://github.com/facebookresearch/detectron2/releases) for requirements. 
+ It will not work with a different version of PyTorch or a non-official build of PyTorch. +2. Such installation is out-of-date w.r.t. master branch of detectron2. It may not be + compatible with the master branch of a research project that uses detectron2 (e.g. those in + [projects](projects) or [meshrcnn](https://github.com/facebookresearch/meshrcnn/)). + +### Common Installation Issues + +If you met issues using the pre-built detectron2, please uninstall it and try building it from source. + +Click each issue for its solutions: + +
+ +Undefined torch/aten/caffe2 symbols, or segmentation fault immediately when running the library. + +
+ +This usually happens when detectron2 or torchvision is not +compiled with the version of PyTorch you're running. + +Pre-built torchvision or detectron2 has to work with the corresponding official release of pytorch. +If the error comes from a pre-built torchvision, uninstall torchvision and pytorch and reinstall them +following [pytorch.org](http://pytorch.org). So the versions will match. + +If the error comes from a pre-built detectron2, check [release notes](https://github.com/facebookresearch/detectron2/releases) +to see the corresponding pytorch version required for each pre-built detectron2. + +If the error comes from detectron2 or torchvision that you built manually from source, +remove files you built (`build/`, `**/*.so`) and rebuild it so it can pick up the version of pytorch currently in your environment. + +If you cannot resolve this problem, please include the output of `gdb -ex "r" -ex "bt" -ex "quit" --args python -m detectron2.utils.collect_env` +in your issue. +
+ +
+ +Undefined C++ symbols (e.g. `GLIBCXX`) or C++ symbols not found. + +
+Usually it's because the library is compiled with a newer C++ compiler but run with an old C++ runtime. + +This often happens with an old anaconda installation. +Try `conda update libgcc`. Then rebuild detectron2. + +The fundamental solution is to run the code with a proper C++ runtime. +One way is to use `LD_PRELOAD=/path/to/libstdc++.so`. +
+ +
+ +"Not compiled with GPU support" or "Detectron2 CUDA Compiler: not available". + +
+CUDA is not found when building detectron2. +You should make sure + +``` +python -c 'import torch; from torch.utils.cpp_extension import CUDA_HOME; print(torch.cuda.is_available(), CUDA_HOME)' +``` + +print valid outputs at the time you build detectron2. + +Most models can run inference (but not training) without GPU support. To use CPUs, set `MODEL.DEVICE='cpu'` in the config. +
+ +
+ +"invalid device function" or "no kernel image is available for execution". + +
+Two possibilities: + +* You build detectron2 with one version of CUDA but run it with a different version. + + To check whether it is the case, + use `python -m detectron2.utils.collect_env` to find out inconsistent CUDA versions. + In the output of this command, you should expect "Detectron2 CUDA Compiler", "CUDA_HOME", "PyTorch built with - CUDA" + to contain cuda libraries of the same version. + + When they are inconsistent, + you need to either install a different build of PyTorch (or build by yourself) + to match your local CUDA installation, or install a different version of CUDA to match PyTorch. + +* Detectron2 or PyTorch/torchvision is not built for the correct GPU architecture (compute compatibility). + + The GPU architecture for PyTorch/detectron2/torchvision is available in the "architecture flags" in + `python -m detectron2.utils.collect_env`. + + The GPU architecture flags of detectron2/torchvision by default matches the GPU model detected + during compilation. This means the compiled code may not work on a different GPU model. + To overwrite the GPU architecture for detectron2/torchvision, use `TORCH_CUDA_ARCH_LIST` environment variable during compilation. + + For example, `export TORCH_CUDA_ARCH_LIST=6.0,7.0` makes it compile for both P100s and V100s. + Visit [developer.nvidia.com/cuda-gpus](https://developer.nvidia.com/cuda-gpus) to find out + the correct compute compatibility number for your device. + +
+ +
+ +Undefined CUDA symbols; cannot open libcudart.so; other nvcc failures. + +
+The version of NVCC you use to build detectron2 or torchvision does +not match the version of CUDA you are running with. +This often happens when using anaconda's CUDA runtime. + +Use `python -m detectron2.utils.collect_env` to find out inconsistent CUDA versions. +In the output of this command, you should expect "Detectron2 CUDA Compiler", "CUDA_HOME", "PyTorch built with - CUDA" +to contain cuda libraries of the same version. + +When they are inconsistent, +you need to either install a different build of PyTorch (or build by yourself) +to match your local CUDA installation, or install a different version of CUDA to match PyTorch. +
+ + +
+ +"ImportError: cannot import name '_C'". + +
+Please build and install detectron2 following the instructions above. + +If you are running code from detectron2's root directory, `cd` to a different one. +Otherwise you may not import the code that you installed. +
+ +
+ +ONNX conversion segfault after some "TraceWarning". + +
+The ONNX package is compiled with too old a compiler. + +Please build and install ONNX from its source code using a compiler +whose version is closer to what's used by PyTorch (available in `torch.__config__.show()`). +
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/LICENSE b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d4836895578c791dffd78d07d83a72a961e270a4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/LICENSE @@ -0,0 +1,201 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files. + +"Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution." 
+ +"Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions: + +(a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and + +(b) You must cause any modified files to carry prominent notices +stating that You changed the files; and + +(c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and + +(d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License. 
+ +You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" +replaced with your own identifying information. (Don't include +the brackets!) The text should be enclosed in the appropriate +comment syntax for the file format. 
We also recommend that a +file or class name and description of purpose be included on the +same "printed page" as the copyright notice for easier +identification within third-party archives. + +Copyright 2019 - present, Facebook, Inc + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/MODEL_ZOO.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/MODEL_ZOO.md new file mode 100644 index 0000000000000000000000000000000000000000..07b81ffffa37d97b10f8d39f934b9f62bcb51cc1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/MODEL_ZOO.md @@ -0,0 +1,903 @@ +# Detectron2 Model Zoo and Baselines + +## Introduction + +This file documents a large collection of baselines trained +with detectron2 in Sep-Oct, 2019. +All numbers were obtained on [Big Basin](https://engineering.fb.com/data-center-engineering/introducing-big-basin-our-next-generation-ai-hardware/) +servers with 8 NVIDIA V100 GPUs & NVLink. The software in use were PyTorch 1.3, CUDA 9.2, cuDNN 7.4.2 or 7.6.3. +You can access these models from code using [detectron2.model_zoo](https://detectron2.readthedocs.io/modules/model_zoo.html) APIs. + +In addition to these official baseline models, you can find more models in [projects/](projects/). + +#### How to Read the Tables +* The "Name" column contains a link to the config file. Running `tools/train_net.py` with this config file + and 8 GPUs will reproduce the model. +* Training speed is averaged across the entire training. + We keep updating the speed with latest version of detectron2/pytorch/etc., + so they might be different from the `metrics` file. + Training speed for multi-machine jobs is not provided. +* Inference speed is measured by `tools/train_net.py --eval-only`, or [inference_on_dataset()](https://detectron2.readthedocs.io/modules/evaluation.html#detectron2.evaluation.inference_on_dataset), + with batch size 1 in detectron2 directly. + Measuring it with your own code will likely introduce other overhead. + Actual deployment in production should in general be faster than the given inference + speed due to more optimizations. +* The *model id* column is provided for ease of reference. + To check downloaded file integrity, any model on this page contains its md5 prefix in its file name. +* Training curves and other statistics can be found in `metrics` for each model. + +#### Common Settings for COCO Models +* All COCO models were trained on `train2017` and evaluated on `val2017`. +* The default settings are __not directly comparable__ with Detectron's standard settings. + For example, our default training data augmentation uses scale jittering in addition to horizontal flipping. + + To make fair comparisons with Detectron's settings, see + [Detectron1-Comparisons](configs/Detectron1-Comparisons/) for accuracy comparison, + and [benchmarks](https://detectron2.readthedocs.io/notes/benchmarks.html) + for speed comparison. 
* For Faster/Mask R-CNN, we provide baselines based on __3 different backbone combinations__:
  * __FPN__: Use a ResNet+FPN backbone with standard conv and FC heads for mask and box prediction,
    respectively. It obtains the best speed/accuracy tradeoff, but the other two are still useful for research.
  * __C4__: Use a ResNet conv4 backbone with a conv5 head. This is the original baseline in the Faster R-CNN paper.
  * __DC5__ (Dilated-C5): Use a ResNet conv5 backbone with dilations in conv5, and standard conv and FC heads
    for mask and box prediction, respectively. This is used by the Deformable ConvNet paper.
* Most models are trained with the 3x schedule (~37 COCO epochs).
  Although 1x models are heavily under-trained, we provide some ResNet-50 models with the 1x (~12 COCO epochs)
  training schedule for comparison when doing quick research iteration.

#### ImageNet Pretrained Models

We provide backbone models pretrained on the ImageNet-1k dataset.
These models have a __different__ format from those provided in Detectron: we do not fuse BatchNorm into an affine layer.
* [R-50.pkl](https://dl.fbaipublicfiles.com/detectron2/ImageNetPretrained/MSRA/R-50.pkl): converted copy of [MSRA's original ResNet-50](https://github.com/KaimingHe/deep-residual-networks) model.
* [R-101.pkl](https://dl.fbaipublicfiles.com/detectron2/ImageNetPretrained/MSRA/R-101.pkl): converted copy of [MSRA's original ResNet-101](https://github.com/KaimingHe/deep-residual-networks) model.
* [X-101-32x8d.pkl](https://dl.fbaipublicfiles.com/detectron2/ImageNetPretrained/FAIR/X-101-32x8d.pkl): ResNeXt-101-32x8d model trained with Caffe2 at FB.

Pretrained models in Detectron's format can still be used. For example:
* [X-152-32x8d-IN5k.pkl](https://dl.fbaipublicfiles.com/detectron/ImageNetPretrained/25093814/X-152-32x8d-IN5k.pkl):
  ResNeXt-152-32x8d model trained on ImageNet-5k with Caffe2 at FB (see the ResNeXt paper for details on ImageNet-5k).
* [R-50-GN.pkl](https://dl.fbaipublicfiles.com/detectron/ImageNetPretrained/47261647/R-50-GN.pkl):
  ResNet-50 with Group Normalization.
* [R-101-GN.pkl](https://dl.fbaipublicfiles.com/detectron/ImageNetPretrained/47592356/R-101-GN.pkl):
  ResNet-101 with Group Normalization.

Torchvision's ResNet models can be used after being converted by [this script](tools/convert-torchvision-to-d2.py).

#### License

All models available for download through this document are licensed under the
[Creative Commons Attribution-ShareAlike 3.0 license](https://creativecommons.org/licenses/by-sa/3.0/).

### COCO Object Detection Baselines

#### Faster R-CNN:

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| R50-C4 | 1x | 0.551 | 0.102 | 4.8 | 35.7 | 137257644 | model \| metrics |
| R50-DC5 | 1x | 0.380 | 0.068 | 5.0 | 37.3 | 137847829 | model \| metrics |
| R50-FPN | 1x | 0.210 | 0.038 | 3.0 | 37.9 | 137257794 | model \| metrics |
| R50-C4 | 3x | 0.543 | 0.104 | 4.8 | 38.4 | 137849393 | model \| metrics |
| R50-DC5 | 3x | 0.378 | 0.070 | 5.0 | 39.0 | 137849425 | model \| metrics |
| R50-FPN | 3x | 0.209 | 0.038 | 3.0 | 40.2 | 137849458 | model \| metrics |
| R101-C4 | 3x | 0.619 | 0.139 | 5.9 | 41.1 | 138204752 | model \| metrics |
| R101-DC5 | 3x | 0.452 | 0.086 | 6.1 | 40.6 | 138204841 | model \| metrics |
| R101-FPN | 3x | 0.286 | 0.051 | 4.1 | 42.0 | 137851257 | model \| metrics |
| X101-FPN | 3x | 0.638 | 0.098 | 6.7 | 43.0 | 139173657 | model \| metrics |
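
As a concrete illustration of the `detectron2.model_zoo` API mentioned in "How to Read the Tables", the sketch below loads one row of this table (R50-FPN, 3x) and runs single-image inference with `DefaultPredictor`. This is a minimal sketch rather than part of this repository: it assumes a working detectron2 installation, and `input.jpg` is a placeholder path.

```python
import cv2
from detectron2 import model_zoo
from detectron2.config import get_cfg
from detectron2.engine import DefaultPredictor

CONFIG = "COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml"  # the "Name" column maps to a config file like this

cfg = get_cfg()
cfg.merge_from_file(model_zoo.get_config_file(CONFIG))    # resolves the _BASE_ inheritance chain
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(CONFIG)  # released weights for that row
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5               # keep only confident detections

predictor = DefaultPredictor(cfg)              # handles resizing/normalization for a single BGR image
outputs = predictor(cv2.imread("input.jpg"))   # placeholder image path
print(outputs["instances"].pred_classes)
print(outputs["instances"].pred_boxes)
```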

#### RetinaNet:

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| R50 | 1x | 0.200 | 0.055 | 3.9 | 36.5 | 137593951 | model \| metrics |
| R50 | 3x | 0.201 | 0.055 | 3.9 | 37.9 | 137849486 | model \| metrics |
| R101 | 3x | 0.280 | 0.068 | 5.1 | 39.9 | 138363263 | model \| metrics |

#### RPN & Fast R-CNN:

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | prop. AR | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| RPN R50-C4 | 1x | 0.130 | 0.034 | 1.5 | | 51.6 | 137258005 | model \| metrics |
| RPN R50-FPN | 1x | 0.186 | 0.032 | 2.7 | | 58.0 | 137258492 | model \| metrics |
| Fast R-CNN R50-FPN | 1x | 0.140 | 0.029 | 2.6 | 37.8 | | 137635226 | model \| metrics |

### COCO Instance Segmentation Baselines with Mask R-CNN

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | mask AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| R50-C4 | 1x | 0.584 | 0.110 | 5.2 | 36.8 | 32.2 | 137259246 | model \| metrics |
| R50-DC5 | 1x | 0.471 | 0.076 | 6.5 | 38.3 | 34.2 | 137260150 | model \| metrics |
| R50-FPN | 1x | 0.261 | 0.043 | 3.4 | 38.6 | 35.2 | 137260431 | model \| metrics |
| R50-C4 | 3x | 0.575 | 0.111 | 5.2 | 39.8 | 34.4 | 137849525 | model \| metrics |
| R50-DC5 | 3x | 0.470 | 0.076 | 6.5 | 40.0 | 35.9 | 137849551 | model \| metrics |
| R50-FPN | 3x | 0.261 | 0.043 | 3.4 | 41.0 | 37.2 | 137849600 | model \| metrics |
| R101-C4 | 3x | 0.652 | 0.145 | 6.3 | 42.6 | 36.7 | 138363239 | model \| metrics |
| R101-DC5 | 3x | 0.545 | 0.092 | 7.6 | 41.9 | 37.3 | 138363294 | model \| metrics |
| R101-FPN | 3x | 0.340 | 0.056 | 4.6 | 42.9 | 38.6 | 138205316 | model \| metrics |
| X101-FPN | 3x | 0.690 | 0.103 | 7.2 | 44.3 | 39.5 | 139653917 | model \| metrics |
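
The box AP and mask AP columns are standard COCO `val2017` results. A hedged sketch of reproducing one of them through `inference_on_dataset()` (the entry point referenced in "How to Read the Tables") is shown below; evaluator constructor signatures vary slightly across detectron2 releases, and the COCO dataset must already be set up under `DETECTRON2_DATASETS`, so treat this as an outline rather than an exact recipe for this repository's vendored copy.

```python
from detectron2 import model_zoo
from detectron2.config import get_cfg
from detectron2.data import build_detection_test_loader
from detectron2.engine import DefaultPredictor
from detectron2.evaluation import COCOEvaluator, inference_on_dataset

CONFIG = "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"  # e.g. the R50-FPN 3x row

cfg = get_cfg()
cfg.merge_from_file(model_zoo.get_config_file(CONFIG))
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(CONFIG)

model = DefaultPredictor(cfg).model                           # eval-mode model with the released weights loaded
evaluator = COCOEvaluator("coco_2017_val", output_dir="./coco_eval")
loader = build_detection_test_loader(cfg, "coco_2017_val")    # requires COCO val2017 to be registered locally
print(inference_on_dataset(model, loader, evaluator))         # reports box AP and mask AP
```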

### COCO Person Keypoint Detection Baselines with Keypoint R-CNN

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | kp. AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| R50-FPN | 1x | 0.315 | 0.072 | 5.0 | 53.6 | 64.0 | 137261548 | model \| metrics |
| R50-FPN | 3x | 0.316 | 0.066 | 5.0 | 55.4 | 65.5 | 137849621 | model \| metrics |
| R101-FPN | 3x | 0.390 | 0.076 | 6.1 | 56.4 | 66.1 | 138363331 | model \| metrics |
| X101-FPN | 3x | 0.738 | 0.121 | 8.7 | 57.3 | 66.0 | 139686956 | model \| metrics |

### COCO Panoptic Segmentation Baselines with Panoptic FPN

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | mask AP | PQ | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| R50-FPN | 1x | 0.304 | 0.053 | 4.8 | 37.6 | 34.7 | 39.4 | 139514544 | model \| metrics |
| R50-FPN | 3x | 0.302 | 0.053 | 4.8 | 40.0 | 36.5 | 41.5 | 139514569 | model \| metrics |
| R101-FPN | 3x | 0.392 | 0.066 | 6.0 | 42.4 | 38.5 | 43.0 | 139514519 | model \| metrics |

### LVIS Instance Segmentation Baselines with Mask R-CNN

Mask R-CNN baselines on the [LVIS dataset](https://lvisdataset.org), v0.5.
These baselines are described in Table 3(c) of the [LVIS paper](https://arxiv.org/abs/1908.03195).

NOTE: the 1x schedule here has the same number of __iterations__ as the COCO 1x baselines.
They are roughly 24 epochs of LVISv0.5 data.
The final results of these configs have large variance across different runs.

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | mask AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| R50-FPN | 1x | 0.292 | 0.107 | 7.1 | 23.6 | 24.4 | 144219072 | model \| metrics |
| R101-FPN | 1x | 0.371 | 0.114 | 7.8 | 25.6 | 25.9 | 144219035 | model \| metrics |
| X101-FPN | 1x | 0.712 | 0.151 | 10.2 | 26.7 | 27.1 | 144219108 | model \| metrics |

### Cityscapes & Pascal VOC Baselines

Simple baselines for
* Mask R-CNN on Cityscapes instance segmentation (initialized from COCO pre-training, then trained on Cityscapes fine annotations only)
* Faster R-CNN on PASCAL VOC object detection (trained on VOC 2007 train+val + VOC 2012 train+val, tested on VOC 2007 using 11-point interpolated AP)

| Name | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | box AP50 | mask AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| R50-FPN, Cityscapes | 0.240 | 0.078 | 4.4 | | | 36.5 | 142423278 | model \| metrics |
| R50-C4, VOC | 0.537 | 0.081 | 4.8 | 51.9 | 80.3 | | 142202221 | model \| metrics |

### Other Settings

Ablations for Deformable Conv and Cascade R-CNN:

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | mask AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| Baseline R50-FPN | 1x | 0.261 | 0.043 | 3.4 | 38.6 | 35.2 | 137260431 | model \| metrics |
| Deformable Conv | 1x | 0.342 | 0.048 | 3.5 | 41.5 | 37.5 | 138602867 | model \| metrics |
| Cascade R-CNN | 1x | 0.317 | 0.052 | 4.0 | 42.1 | 36.4 | 138602847 | model \| metrics |
| Baseline R50-FPN | 3x | 0.261 | 0.043 | 3.4 | 41.0 | 37.2 | 137849600 | model \| metrics |
| Deformable Conv | 3x | 0.349 | 0.047 | 3.5 | 42.7 | 38.5 | 144998336 | model \| metrics |
| Cascade R-CNN | 3x | 0.328 | 0.053 | 4.0 | 44.3 | 38.5 | 144998488 | model \| metrics |

Ablations for normalization methods, and a few models trained from scratch following [Rethinking ImageNet Pre-training](https://arxiv.org/abs/1811.08883).
(Note: the baseline uses a `2fc` head while the others use a [`4conv1fc` head](https://arxiv.org/abs/1803.08494).)

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | mask AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| Baseline R50-FPN | 3x | 0.261 | 0.043 | 3.4 | 41.0 | 37.2 | 137849600 | model \| metrics |
| GN | 3x | 0.356 | 0.069 | 7.3 | 42.6 | 38.6 | 138602888 | model \| metrics |
| SyncBN | 3x | 0.371 | 0.053 | 5.5 | 41.9 | 37.8 | 169527823 | model \| metrics |
| GN (from scratch) | 3x | 0.400 | 0.069 | 9.8 | 39.9 | 36.6 | 138602908 | model \| metrics |
| GN (from scratch) | 9x | N/A | 0.070 | 9.8 | 43.7 | 39.6 | 183808979 | model \| metrics |
| SyncBN (from scratch) | 9x | N/A | 0.055 | 7.2 | 43.6 | 39.3 | 184226666 | model \| metrics |

A few very large models trained for a long time, for demo purposes. They are trained using multiple machines:

| Name | inference time (s/im) | train mem (GB) | box AP | mask AP | PQ | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| Panoptic FPN R101 | 0.107 | 11.4 | 47.4 | 41.3 | 46.1 | 139797668 | model \| metrics |
| Mask R-CNN X152 | 0.242 | 15.1 | 50.2 | 44.0 | | 18131413 | model \| metrics |
| above + test-time aug. | | | 51.9 | 45.9 | | | |
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1fbb95b39ce9e9c0eab83079319a9298fca438b1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/README.md @@ -0,0 +1,56 @@ + + +Detectron2 is Facebook AI Research's next generation software system +that implements state-of-the-art object detection algorithms. +It is a ground-up rewrite of the previous version, +[Detectron](https://github.com/facebookresearch/Detectron/), +and it originates from [maskrcnn-benchmark](https://github.com/facebookresearch/maskrcnn-benchmark/). + +
+ +### What's New +* It is powered by the [PyTorch](https://pytorch.org) deep learning framework. +* Includes more features such as panoptic segmentation, densepose, Cascade R-CNN, rotated bounding boxes, etc. +* Can be used as a library to support [different projects](projects/) on top of it. + We'll open source more research projects in this way. +* It [trains much faster](https://detectron2.readthedocs.io/notes/benchmarks.html). + +See our [blog post](https://ai.facebook.com/blog/-detectron2-a-pytorch-based-modular-object-detection-library-/) +to see more demos and learn about detectron2. + +## Installation + +See [INSTALL.md](INSTALL.md). + +## Quick Start + +See [GETTING_STARTED.md](GETTING_STARTED.md), +or the [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5). + +Learn more at our [documentation](https://detectron2.readthedocs.org). +And see [projects/](projects/) for some projects that are built on top of detectron2. + +## Model Zoo and Baselines + +We provide a large set of baseline results and trained models available for download in the [Detectron2 Model Zoo](MODEL_ZOO.md). + + +## License + +Detectron2 is released under the [Apache 2.0 license](LICENSE). + +## Citing Detectron2 + +If you use Detectron2 in your research or wish to refer to the baseline results published in the [Model Zoo](MODEL_ZOO.md), please use the following BibTeX entry. + +```BibTeX +@misc{wu2019detectron2, + author = {Yuxin Wu and Alexander Kirillov and Francisco Massa and + Wan-Yen Lo and Ross Girshick}, + title = {Detectron2}, + howpublished = {\url{https://github.com/facebookresearch/detectron2}}, + year = {2019} +} +``` diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-C4.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-C4.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fbf34a0ea57a587e09997edd94c4012d69d0b6ad --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-C4.yaml @@ -0,0 +1,18 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + RPN: + PRE_NMS_TOPK_TEST: 6000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "Res5ROIHeads" +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-DilatedC5.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-DilatedC5.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c0d6d16bdaf532f09e4976f0aa240a49e748da27 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-DilatedC5.yaml @@ -0,0 +1,31 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + RESNETS: + OUT_FEATURES: ["res5"] + RES5_DILATION: 2 + RPN: + IN_FEATURES: ["res5"] + PRE_NMS_TOPK_TEST: 6000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "StandardROIHeads" + IN_FEATURES: ["res5"] + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + ROI_MASK_HEAD: + NAME: "MaskRCNNConvUpsampleHead" + NUM_CONV: 4 + POOLER_RESOLUTION: 14 +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 
736, 768, 800) +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3e020f2e7b2f26765be317f907126a1556621abf --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RCNN-FPN.yaml @@ -0,0 +1,42 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + BACKBONE: + NAME: "build_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res2", "res3", "res4", "res5"] + FPN: + IN_FEATURES: ["res2", "res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: [[32], [64], [128], [256], [512]] # One size for each in feature map + ASPECT_RATIOS: [[0.5, 1.0, 2.0]] # Three aspect ratios (same for all in feature maps) + RPN: + IN_FEATURES: ["p2", "p3", "p4", "p5", "p6"] + PRE_NMS_TOPK_TRAIN: 2000 # Per FPN level + PRE_NMS_TOPK_TEST: 1000 # Per FPN level + # Detectron1 uses 2000 proposals per-batch, + # (See "modeling/rpn/rpn_outputs.py" for details of this legacy issue) + # which is approximately 1000 proposals per-image since the default batch size for FPN is 2. + POST_NMS_TOPK_TRAIN: 1000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "StandardROIHeads" + IN_FEATURES: ["p2", "p3", "p4", "p5"] + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + ROI_MASK_HEAD: + NAME: "MaskRCNNConvUpsampleHead" + NUM_CONV: 4 + POOLER_RESOLUTION: 14 +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RetinaNet.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RetinaNet.yaml new file mode 100644 index 0000000000000000000000000000000000000000..12ec9d2fc20cc0438f17bde2c5f6fbee9496c1b0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Base-RetinaNet.yaml @@ -0,0 +1,24 @@ +MODEL: + META_ARCHITECTURE: "RetinaNet" + BACKBONE: + NAME: "build_retinanet_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: !!python/object/apply:eval ["[[x, x * 2**(1.0/3), x * 2**(2.0/3) ] for x in [32, 64, 128, 256, 512 ]]"] + FPN: + IN_FEATURES: ["res3", "res4", "res5"] + RETINANET: + IOU_THRESHOLDS: [0.4, 0.5] + IOU_LABELS: [0, -1, 1] +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.01 # Note that RetinaNet uses a different default learning rate + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..773ac10e87c626760d00d831bf664ce9ff073c49 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,17 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + LOAD_PROPOSALS: True + RESNETS: + DEPTH: 50 + 
PROPOSAL_GENERATOR: + NAME: "PrecomputedProposals" +DATASETS: + TRAIN: ("coco_2017_train",) + PROPOSAL_FILES_TRAIN: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_train_box_proposals_21bc3a.pkl", ) + TEST: ("coco_2017_val",) + PROPOSAL_FILES_TEST: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl", ) +DATALOADER: + # proposals are part of the dataset_dicts, and take a lot of RAM + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..db142cd671c1841b4f64cf130bee7f7954ecdd28 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: False + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bceb6b343618d8cd9a6c414ff9eb86ab31cc230a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: False + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..57a098f53ee8c54ecfa354cc96efefd890dc1b72 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: False + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f96130105c3ba6ab393e0932870903875f5cb732 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..bc51bce390a85ee3529ffdcebde05748e1646be0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0fe96f57febdac5790ea4cec168fa4b97ac4807a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..33fadeb87d1ef67ab2b55926b9a652ab4ac4a27d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3262019a1211b910d3b371569199ed1afaacf6a4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..41395182bf5c9dd8ab1241c4414068817298d554 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9c9b5ab77157baa581d90d9847c045c19ed6ffa3 --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml @@ -0,0 +1,13 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + MASK_ON: False + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4abb1b9a547957aa6afc0b29129e00f89cf98d59 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "../Base-RetinaNet.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4a24ce3a9a108a8792e18c8aabfb7b712f0d3725 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml @@ -0,0 +1,5 @@ +_BASE_: "../Base-RetinaNet.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3b5412d4a7aef1d6c3f7c1e34f94007de639b833 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "../Base-RetinaNet.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/rpn_R_50_C4_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/rpn_R_50_C4_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e04821156b0376ba5215d5ce5b7010a36b43e6a1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/rpn_R_50_C4_1x.yaml @@ -0,0 +1,10 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + META_ARCHITECTURE: "ProposalNetwork" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + RPN: + PRE_NMS_TOPK_TEST: 12000 + POST_NMS_TOPK_TEST: 2000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dc9c95203b1c3c9cd9bb9876bb8d9a5dd9b31d9a --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "ProposalNetwork" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + RPN: + POST_NMS_TOPK_TEST: 2000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1a94cc45a0f2aaa8c92e14871c553b736545e327 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..67b70cf4be8c19f5dc735b6f55a8690698f34b69 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1935a302d2d0fa7f69553b3fd50b5a7082c6c0d1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a9aeb4eac38026dbb867e799f9fd3a8d8eb3af80 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..38ed867d897dfec839cbcf11a2e2dc8abb92f07c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b13eefab2a049c48d94d5051c82ceb6dbde40579 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d401016358f967f6619d88b1c9bd5673a1cdeba8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d50fb866ca7811a87b42555c7213f88e00bf6df1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..be7d06b8e0f032ee7fcaabd7c122158518489fd2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml new 
file mode 100644 index 0000000000000000000000000000000000000000..d14c63f74383bfc308750f51d51344398b02a239 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml @@ -0,0 +1,13 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + MASK_ON: True + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4e03944a42d2e497da5ceca17c8fda797dac3f82 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml @@ -0,0 +1,15 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + KEYPOINT_ON: True + ROI_HEADS: + NUM_CLASSES: 1 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 0.5 # Keypoint AP degrades (though box AP improves) when using plain L1 loss + RPN: + # Detectron1 uses 2000 proposals per-batch, but this option is per-image in detectron2. + # 1000 proposals per-image is found to hurt box AP. + # Therefore we increase it to 1500 per-image. + POST_NMS_TOPK_TRAIN: 1500 +DATASETS: + TRAIN: ("keypoints_coco_2017_train",) + TEST: ("keypoints_coco_2017_val",) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9309535c57a1aa7d23297aac80a9bd78a6c79fcc --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7bf85cf745b53b3e7ab28fe94b7f4f9e7fe6e335 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,5 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a07f243f650a497b9372501e3face75194cf0941 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: 
"detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d4bfa20a98c0a65c6bd60e93b07e8f4b7d92a867 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml @@ -0,0 +1,12 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..755c12018c5db8ca456d5e7fa8cbd18d90f97527 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "PanopticFPN" + MASK_ON: True + SEM_SEG_HEAD: + LOSS_WEIGHT: 0.5 +DATASETS: + TRAIN: ("coco_2017_train_panoptic_separated",) + TEST: ("coco_2017_val_panoptic_separated",) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0e01f6fb31e9b00b1857b7de3b5074184d1f4a21 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6afa2c1cc92495309ed1553a17359fe5d7d6566e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml @@ -0,0 +1,5 @@ +_BASE_: "Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b956b3f673e78649184fe2c50e2700b3f1f14794 --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1a7aaeb961581ed9492c4cfe5a69a1eb60495b3e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml @@ -0,0 +1,27 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + # WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + # For better, more stable performance initialize from COCO + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl" + MASK_ON: True + ROI_HEADS: + NUM_CLASSES: 8 +# This is similar to the setting used in Mask R-CNN paper, Appendix A +# But there are some differences, e.g., we did not initialize the output +# layer using the corresponding classes from COCO +INPUT: + MIN_SIZE_TRAIN: (800, 832, 864, 896, 928, 960, 992, 1024) + MIN_SIZE_TRAIN_SAMPLING: "choice" + MIN_SIZE_TEST: 1024 + MAX_SIZE_TRAIN: 2048 + MAX_SIZE_TEST: 2048 +DATASETS: + TRAIN: ("cityscapes_fine_instance_seg_train",) + TEST: ("cityscapes_fine_instance_seg_val",) +SOLVER: + BASE_LR: 0.01 + STEPS: (18000,) + MAX_ITER: 24000 + IMS_PER_BATCH: 8 +TEST: + EVAL_PERIOD: 8000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a90ed9e433a00b8b9f43961d7a2696d5b9013127 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/README.md @@ -0,0 +1,83 @@ + +Detectron2 model zoo's experimental settings and a few implementation details are different from Detectron. + +The differences in implementation details are shared in +[Compatibility with Other Libraries](../../docs/notes/compatibility.md). + +The differences in model zoo's experimental settings include: +* Use scale augmentation during training. This improves AP with lower training cost. +* Use L1 loss instead of smooth L1 loss for simplicity. This sometimes improves box AP but may + affect other AP. +* Use `POOLER_SAMPLING_RATIO=0` instead of 2. This does not significantly affect AP. +* Use `ROIAlignV2`. This does not significantly affect AP. + +In this directory, we provide a few configs that __do not__ have the above changes. +They mimic Detectron's behavior as close as possible, +and provide a fair comparison of accuracy and speed against Detectron. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | mask AP | kp. AP | model id | download |
| --- | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| Faster R-CNN | 1x | 0.219 | 0.038 | 3.1 | 36.9 | | | 137781054 | model \| metrics |
| Keypoint R-CNN | 1x | 0.313 | 0.071 | 5.0 | 53.1 | | 64.2 | 137781195 | model \| metrics |
| Mask R-CNN | 1x | 0.273 | 0.043 | 3.4 | 37.8 | 34.9 | | 137781281 | model \| metrics |
+ +## Comparisons: + +* Faster R-CNN: Detectron's AP is 36.7, similar to ours. +* Keypoint R-CNN: Detectron's AP is box 53.6, keypoint 64.2. Fixing a Detectron's + [bug](https://github.com/facebookresearch/Detectron/issues/459) lead to a drop in box AP, and can be + compensated back by some parameter tuning. +* Mask R-CNN: Detectron's AP is box 37.7, mask 33.9. We're 1 AP better in mask AP, due to more correct implementation. + +For speed comparison, see [benchmarks](https://detectron2.readthedocs.io/notes/benchmarks.html). diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6ce77f137fa2c4e5254a62b58c18b8b76096f2aa --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml @@ -0,0 +1,17 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + # Detectron1 uses smooth L1 loss with some magic beta values. + # The defaults are changed to L1 loss in Detectron2. + RPN: + SMOOTH_L1_BETA: 0.1111 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" +INPUT: + # no scale augmentation + MIN_SIZE_TRAIN: (800, ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aacf868ba5290c752031c130a2081af48afc0808 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,27 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 1 + ROI_KEYPOINT_HEAD: + POOLER_RESOLUTION: 14 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + # Detectron1 uses smooth L1 loss with some magic beta values. + # The defaults are changed to L1 loss in Detectron2. + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + RPN: + SMOOTH_L1_BETA: 0.1111 + # Detectron1 uses 2000 proposals per-batch, but this option is per-image in detectron2 + # 1000 proposals per-image is found to hurt box AP. + # Therefore we increase it to 1500 per-image. 
+ POST_NMS_TOPK_TRAIN: 1500 +DATASETS: + TRAIN: ("keypoints_coco_2017_train",) + TEST: ("keypoints_coco_2017_val",) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4ea86a8d8e2cd3e51cbc7311b0d00710c07d01f6 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml @@ -0,0 +1,20 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + # Detectron1 uses smooth L1 loss with some magic beta values. + # The defaults are changed to L1 loss in Detectron2. + RPN: + SMOOTH_L1_BETA: 0.1111 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + ROI_MASK_HEAD: + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" +INPUT: + # no scale augmentation + MIN_SIZE_TRAIN: (800, ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f0c3a1bbc0a09e1384de522f30c443ba1e36fafa --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 + ROI_HEADS: + NUM_CLASSES: 1230 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v0.5_train",) + TEST: ("lvis_v0.5_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..64b4caa4ef2b284782367ea702e1ae6653472630 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 1230 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v0.5_train",) + TEST: ("lvis_v0.5_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..c8b822c6c006ba642f4caf9b55e7983f6797427a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/LVIS-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml @@ -0,0 +1,23 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + MASK_ON: True + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 + ROI_HEADS: + NUM_CLASSES: 1230 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v0.5_train",) + TEST: ("lvis_v0.5_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..abb33b618932e94b66239945ac892f4c84a6e8f8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,12 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NAME: CascadeROIHeads + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + RPN: + POST_NMS_TOPK_TRAIN: 2000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e2201ad5c46ded91ccfa47b7698a521625c5e447 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,15 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NAME: CascadeROIHeads + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + RPN: + POST_NMS_TOPK_TRAIN: 2000 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fc117f6b5e3e51558ec2f01b73c5365622e5ce25 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml @@ -0,0 +1,36 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + MASK_ON: True + WEIGHTS: "catalog://ImageNetPretrained/FAIR/X-152-32x8d-IN5k" + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 152 + DEFORM_ON_PER_STAGE: [False, True, True, True] + ROI_HEADS: + NAME: "CascadeROIHeads" + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_CONV: 4 + NUM_FC: 1 + NORM: "GN" + CLS_AGNOSTIC_BBOX_REG: True + ROI_MASK_HEAD: + NUM_CONV: 8 + NORM: "GN" + RPN: + POST_NMS_TOPK_TRAIN: 2000 +SOLVER: + IMS_PER_BATCH: 128 + STEPS: (35000, 45000) + MAX_ITER: 
50000 + BASE_LR: 0.16 +INPUT: + MIN_SIZE_TRAIN: (640, 864) + MIN_SIZE_TRAIN_SAMPLING: "range" + MAX_SIZE_TRAIN: 1440 + CROP: + ENABLED: True +TEST: + EVAL_PERIOD: 2500 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv_parsing.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv_parsing.yaml new file mode 100644 index 0000000000000000000000000000000000000000..544f58f620607ba6eb592593a2f85243c8670451 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv_parsing.yaml @@ -0,0 +1,42 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + MASK_ON: True +# WEIGHTS: "catalog://ImageNetPretrained/FAIR/X-152-32x8d-IN5k" + WEIGHTS: "model_0039999_e76410.pkl" + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 152 + DEFORM_ON_PER_STAGE: [False, True, True, True] + ROI_HEADS: + NAME: "CascadeROIHeads" + NUM_CLASSES: 1 + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_CONV: 4 + NUM_FC: 1 + NORM: "GN" + CLS_AGNOSTIC_BBOX_REG: True + ROI_MASK_HEAD: + NUM_CONV: 8 + NORM: "GN" + RPN: + POST_NMS_TOPK_TRAIN: 2000 +SOLVER: +# IMS_PER_BATCH: 128 + IMS_PER_BATCH: 1 + STEPS: (35000, 45000) + MAX_ITER: 50000 + BASE_LR: 0.16 +INPUT: + MIN_SIZE_TRAIN: (640, 864) + MIN_SIZE_TRAIN_SAMPLING: "range" + MAX_SIZE_TRAIN: 1440 + CROP: + ENABLED: True +TEST: + EVAL_PERIOD: 2500 +DATASETS: + TRAIN: ("CIHP_train","VIP_trainval") + TEST: ("CIHP_val",) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/demo.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/demo.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bbf9685f5921c7aa1c967b4e7da88aaf061a72e2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/demo.yaml @@ -0,0 +1,25 @@ +_BASE_: "cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml" +MODEL: + MASK_ON: True + ROI_HEADS: + NMS_THRESH_TEST: 0.95 + SCORE_THRESH_TEST: 0.5 + NUM_CLASSES: 1 +SOLVER: + IMS_PER_BATCH: 1 + STEPS: (30000, 45000) + MAX_ITER: 50000 + BASE_LR: 0.02 +INPUT: + MIN_SIZE_TRAIN: (640, 864) + MIN_SIZE_TRAIN_SAMPLING: "range" + MAX_SIZE_TRAIN: 1440 + CROP: + ENABLED: True +TEST: + AUG: + ENABLED: True +DATASETS: + TRAIN: ("demo_train",) + TEST: ("demo_val",) +OUTPUT_DIR: "../../data/DemoDataset/detectron2_prediction" diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4c3b767ff473bbab7225cc8a4a92608543d78246 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml @@ -0,0 +1,10 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + ROI_MASK_HEAD: + CLS_AGNOSTIC_MASK: True diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml new file mode 100644 index 0000000000000000000000000000000000000000..04ff988d073ef9169ee4ca2cbce0d6f030c15232 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml @@ -0,0 +1,8 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + DEFORM_ON_PER_STAGE: [False, True, True, True] # on Res3,Res4,Res5 + DEFORM_MODULATED: False diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml new file mode 100644 index 0000000000000000000000000000000000000000..68c0ca58d7df97ca728c339da0ca9828fe6be318 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + DEFORM_ON_PER_STAGE: [False, True, True, True] # on Res3,Res4,Res5 + DEFORM_MODULATED: False +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..74d274e5a529b5a8afe186940868f9d48c6112b3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml @@ -0,0 +1,21 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "catalog://ImageNetPretrained/FAIR/R-50-GN" + MASK_ON: True + RESNETS: + DEPTH: 50 + NORM: "GN" + STRIDE_IN_1X1: False + FPN: + NORM: "GN" + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_CONV: 4 + NUM_FC: 1 + NORM: "GN" + ROI_MASK_HEAD: + NORM: "GN" +SOLVER: + # 3x schedule + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..11ebb076ba529f26c71a0d972e96ca4c2d6a830b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml @@ -0,0 +1,24 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + NORM: "SyncBN" + STRIDE_IN_1X1: True + FPN: + NORM: "SyncBN" + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_CONV: 4 + NUM_FC: 1 + NORM: "SyncBN" + ROI_MASK_HEAD: + NORM: "SyncBN" +SOLVER: + # 3x schedule + STEPS: (210000, 250000) + MAX_ITER: 270000 +TEST: + PRECISE_BN: + ENABLED: True diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..34016cea3ca9d7fb69ef4fe01d6b47ee8690a13b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml @@ -0,0 +1,26 @@ +# A large PanopticFPN for demo purposes. +# Use GN on backbone to support semantic seg. +# Use Cascade + Deform Conv to improve localization. +_BASE_: "../COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "catalog://ImageNetPretrained/FAIR/R-101-GN" + RESNETS: + DEPTH: 101 + NORM: "GN" + DEFORM_ON_PER_STAGE: [False, True, True, True] + STRIDE_IN_1X1: False + FPN: + NORM: "GN" + ROI_HEADS: + NAME: CascadeROIHeads + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + ROI_MASK_HEAD: + NORM: "GN" + RPN: + POST_NMS_TOPK_TRAIN: 2000 +SOLVER: + STEPS: (105000, 125000) + MAX_ITER: 135000 + IMS_PER_BATCH: 32 + BASE_LR: 0.04 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/parsing_finetune_cihp.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/parsing_finetune_cihp.yaml new file mode 100644 index 0000000000000000000000000000000000000000..766f46aa0cd3a80efb330052bdb695bebb5efb7d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/parsing_finetune_cihp.yaml @@ -0,0 +1,24 @@ +_BASE_: "cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml" +MODEL: + MASK_ON: True + WEIGHTS: "model_0039999_e76410.pkl" + ROI_HEADS: + NUM_CLASSES: 1 +SOLVER: + IMS_PER_BATCH: 16 + STEPS: (140000, 180000) + MAX_ITER: 200000 + BASE_LR: 0.02 +INPUT: + MIN_SIZE_TRAIN: (640, 864) + MIN_SIZE_TRAIN_SAMPLING: "range" + MAX_SIZE_TRAIN: 1440 + CROP: + ENABLED: True +TEST: + EVAL_PERIOD: 0 +DATASETS: + TRAIN: ("CIHP_train",) + TEST: ("CIHP_val",) +OUTPUT_DIR: "./finetune_output" + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/parsing_inference.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/parsing_inference.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d6a529b1eff2ddf553b1ba32f7b65172f03fae1f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/parsing_inference.yaml @@ -0,0 +1,26 @@ +_BASE_: "cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml" +MODEL: + MASK_ON: True + WEIGHTS: "./finetune_output/model_final.pth" + ROI_HEADS: + NMS_THRESH_TEST: 0.95 + SCORE_THRESH_TEST: 0.5 + NUM_CLASSES: 1 +SOLVER: + IMS_PER_BATCH: 1 + STEPS: (30000, 45000) + MAX_ITER: 50000 + BASE_LR: 0.02 +INPUT: + MIN_SIZE_TRAIN: (640, 864) + MIN_SIZE_TRAIN_SAMPLING: "range" + MAX_SIZE_TRAIN: 1440 + CROP: + ENABLED: True +TEST: + AUG: + ENABLED: True +DATASETS: + TRAIN: ("CIHP_trainval",) + TEST: ("CIHP_test",) +OUTPUT_DIR: "./inference_output" diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f3400288cde242fcf66eef7f63b5a9165ca663c5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml @@ -0,0 +1,13 @@ +_BASE_: "mask_rcnn_R_50_FPN_3x_gn.yaml" +MODEL: + # Train from random initialization.
+ WEIGHTS: "" + # It makes sense to divide by STD when training from scratch + # But it seems to make no difference on the results and C2's models didn't do this. + # So we keep things consistent with C2. + # PIXEL_STD: [57.375, 57.12, 58.395] + MASK_ON: True + BACKBONE: + FREEZE_AT: 0 +# NOTE: Please refer to Rethinking ImageNet Pre-training https://arxiv.org/abs/1811.08883 +# to learn what you need for training from scratch. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d90c9ff0ef4573252ee165b4c958ec5f74178176 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml @@ -0,0 +1,19 @@ +_BASE_: "mask_rcnn_R_50_FPN_3x_gn.yaml" +MODEL: + PIXEL_STD: [57.375, 57.12, 58.395] + WEIGHTS: "" + MASK_ON: True + RESNETS: + STRIDE_IN_1X1: False + BACKBONE: + FREEZE_AT: 0 +SOLVER: + # 9x schedule + IMS_PER_BATCH: 64 # 4x the standard + STEPS: (187500, 197500) # last 60/4==15k and last 20/4==5k + MAX_ITER: 202500 # 90k * 9 / 4 + BASE_LR: 0.08 +TEST: + EVAL_PERIOD: 2500 +# NOTE: Please refer to Rethinking ImageNet Pre-training https://arxiv.org/abs/1811.08883 +# to learn what you need for training from scratch. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..60d4e42330e396a1901437df8e17b262d5ad547a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml @@ -0,0 +1,19 @@ +_BASE_: "mask_rcnn_R_50_FPN_3x_syncbn.yaml" +MODEL: + PIXEL_STD: [57.375, 57.12, 58.395] + WEIGHTS: "" + MASK_ON: True + RESNETS: + STRIDE_IN_1X1: False + BACKBONE: + FREEZE_AT: 0 +SOLVER: + # 9x schedule + IMS_PER_BATCH: 64 # 4x the standard + STEPS: (187500, 197500) # last 60/4==15k and last 20/4==5k + MAX_ITER: 202500 # 90k * 9 / 4 + BASE_LR: 0.08 +TEST: + EVAL_PERIOD: 2500 +# NOTE: Please refer to Rethinking ImageNet Pre-training https://arxiv.org/abs/1811.08883 +# to learn what you need for training from scratch. 
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/semantic_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/semantic_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ac256e1372770ab3d9ae522c962de0fd0dbceeb5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/Misc/semantic_R_50_FPN_1x.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "SemanticSegmentor" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +DATASETS: + TRAIN: ("coco_2017_train_panoptic_stuffonly",) + TEST: ("coco_2017_val_panoptic_stuffonly",) +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ea2a6baaebd1a186db18f2904430ffb25901898e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 20 +INPUT: + MIN_SIZE_TRAIN: (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) + MIN_SIZE_TEST: 800 +DATASETS: + TRAIN: ('voc_2007_trainval', 'voc_2012_trainval') + TEST: ('voc_2007_test',) +SOLVER: + STEPS: (12000, 16000) + MAX_ITER: 18000 # 17.4 epochs + WARMUP_ITERS: 100 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e554cab18a358a27b630c1ab0c2359666b0e1514 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 20 +INPUT: + MIN_SIZE_TRAIN: (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) + MIN_SIZE_TEST: 800 +DATASETS: + TRAIN: ('voc_2007_trainval', 'voc_2012_trainval') + TEST: ('voc_2007_test',) +SOLVER: + STEPS: (12000, 16000) + MAX_ITER: 18000 # 17.4 epochs + WARMUP_ITERS: 100 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/my_Base-RCNN-FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/my_Base-RCNN-FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d649eed7f333dfb07d7a096c6267dc0066e847c1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/my_Base-RCNN-FPN.yaml @@ -0,0 +1,42 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + BACKBONE: + NAME: "build_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res2", "res3", "res4", "res5"] + FPN: + IN_FEATURES: ["res2", "res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: [[32], [64], [128], [256], [512]] # One size for each in feature map + ASPECT_RATIOS: [[0.5, 1.0, 2.0]] # Three aspect ratios (same for 
all in feature maps) + RPN: + IN_FEATURES: ["p2", "p3", "p4", "p5", "p6"] + PRE_NMS_TOPK_TRAIN: 2000 # Per FPN level + PRE_NMS_TOPK_TEST: 1000 # Per FPN level + # Detectron1 uses 2000 proposals per-batch, + # (See "modeling/rpn/rpn_outputs.py" for details of this legacy issue) + # which is approximately 1000 proposals per-image since the default batch size for FPN is 2. + POST_NMS_TOPK_TRAIN: 1000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "StandardROIHeads" + IN_FEATURES: ["p2", "p3", "p4", "p5"] + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + ROI_MASK_HEAD: + NAME: "MaskRCNNConvUpsampleHead" + NUM_CONV: 4 + POOLER_RESOLUTION: 14 +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 2 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a278199b8557a1e2fb341fe6757786a6cecb82b3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/README.md @@ -0,0 +1 @@ +These are quick configs for performance or accuracy regression tracking purposes. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fc5a4116cb096278823049c1f823e99f8e16e97e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://Misc/cascade_mask_rcnn_R_50_FPN_3x/144998488/model_final_480dd8.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 50.18, 0.02], ["segm", "AP", 43.87, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e41a0fe7ffe9c3531741df49e546aa45cfe4fdee --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml" +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a2f37e5e2cc2a9e195e13703e9930e67e0f9a896 --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-Detection/fast_rcnn_R_50_FPN_1x/137635226/model_final_e5f7ce.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 45.70, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..52fc0ec03c8b87ab2be1dda97bec1e8c93e6bb5c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,15 @@ +_BASE_: "../COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" +DATASETS: + TRAIN: ("coco_2017_val_100",) + PROPOSAL_FILES_TRAIN: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl", ) + TEST: ("coco_2017_val_100",) + PROPOSAL_FILES_TEST: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl", ) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..14cf2aa82aec52ad44e28ead0665dad811d55457 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x/137849621/model_final_a6e10b.pkl" +DATASETS: + TEST: ("keypoints_coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 52.47, 0.02], ["keypoints", "AP", 67.36, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dc09034bdd3db9d3e0dc62a017a3883dbe79c649 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,14 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True +DATASETS: + TRAIN: ("keypoints_coco_2017_val_100",) + TEST: ("keypoints_coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml 
new file mode 100644 index 0000000000000000000000000000000000000000..4b92392f1c4457033ae4c87a521e339fe9e184ce --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml @@ -0,0 +1,30 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + NUM_CLASSES: 1 + ROI_KEYPOINT_HEAD: + POOLER_RESOLUTION: 14 + POOLER_SAMPLING_RATIO: 2 + NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS: False + LOSS_WEIGHT: 4.0 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 # Keypoint AP degrades when using plain L1 loss + RPN: + SMOOTH_L1_BETA: 0.2 # Keypoint AP degrades when using plain L1 loss +DATASETS: + TRAIN: ("keypoints_coco_2017_val",) + TEST: ("keypoints_coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +SOLVER: + WARMUP_FACTOR: 0.33333333 + WARMUP_ITERS: 100 + STEPS: (5500, 5800) + MAX_ITER: 6000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 55.35, 1.0], ["keypoints", "AP", 76.91, 1.0]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9bd962878fea64035887c48981beeb8d41bfdbd0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml @@ -0,0 +1,28 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + NUM_CLASSES: 1 + ROI_KEYPOINT_HEAD: + POOLER_RESOLUTION: 14 + POOLER_SAMPLING_RATIO: 2 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 # Keypoint AP degrades when using plain L1 loss + RPN: + SMOOTH_L1_BETA: 0.2 # Keypoint AP degrades when using plain L1 loss +DATASETS: + TRAIN: ("keypoints_coco_2017_val",) + TEST: ("keypoints_coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +SOLVER: + WARMUP_FACTOR: 0.33333333 + WARMUP_ITERS: 100 + STEPS: (5500, 5800) + MAX_ITER: 6000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 53.5, 1.0], ["keypoints", "AP", 72.4, 1.0]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ab6e69812b94ea7e071f29d9a6937d5c70805b5b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.001 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 + CLIP_GRADIENTS: + ENABLED: True + CLIP_TYPE: "value" + CLIP_VALUE: 1.0 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b2d5b7ff87e069f8c774a230bdfd47b8c12d18a3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x/137849525/model_final_4ce675.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 47.37, 0.02], ["segm", "AP", 40.99, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6c4f1214efa520944fd941daec082ad45c164a23 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml @@ -0,0 +1,14 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.001 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f68dd8f96c7896b5fc95d694a399f2ce417c1deb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml @@ -0,0 +1,22 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val",) + TEST: ("coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (600,) + MAX_SIZE_TRAIN: 1000 + MIN_SIZE_TEST: 800 + MAX_SIZE_TEST: 1000 +SOLVER: + IMS_PER_BATCH: 8 # base uses 16 + WARMUP_FACTOR: 0.33333 + WARMUP_ITERS: 100 + STEPS: (11000, 11600) + MAX_ITER: 12000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 41.88, 0.7], ["segm", "AP", 33.79, 0.5]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e3ce6cf922ae07fba5b5e01edbac19bf58a8e9dd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x/137849551/model_final_84107b.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 47.44, 0.02], ["segm", "AP", 42.94, 0.02]] diff --git 
a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e5454bfd95cc37749c50aec7866f32d9a80ca2b7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,10 @@ +_BASE_: "../COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 47.34, 0.02], ["segm", "AP", 42.67, 0.02], ["bbox_TTA", "AP", 49.11, 0.02], ["segm_TTA", "AP", 45.04, 0.02]] + AUG: + ENABLED: True + MIN_SIZES: (700, 800) # to save some time diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6dbfcde0bf837990634d419a6dda1e2909c3cd7f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,14 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ffca550461035967a565dca39bca039658a68eed --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml @@ -0,0 +1,21 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val",) + TEST: ("coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (600,) + MAX_SIZE_TRAIN: 1000 + MIN_SIZE_TEST: 800 + MAX_SIZE_TEST: 1000 +SOLVER: + WARMUP_FACTOR: 0.3333333 + WARMUP_ITERS: 100 + STEPS: (5500, 5800) + MAX_ITER: 6000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 42.0, 1.6], ["segm", "AP", 35.4, 1.25]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..70874e3a92c9034d75cbbebb145b61084ba15e42 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml" +MODEL: + WEIGHTS: 
"detectron2://COCO-PanopticSegmentation/panoptic_fpn_R_50_3x/139514569/model_final_c10459.pkl" +DATASETS: + TEST: ("coco_2017_val_100_panoptic_separated",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 46.47, 0.02], ["segm", "AP", 43.39, 0.02], ["sem_seg", "mIoU", 42.55, 0.02], ["panoptic_seg", "PQ", 38.99, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7cdee7bfcf6dc75dda52602a0d9177ad0a9cc6ed --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "PanopticFPN" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + SEM_SEG_HEAD: + LOSS_WEIGHT: 0.5 +DATASETS: + TRAIN: ("coco_2017_val_100_panoptic_separated",) + TEST: ("coco_2017_val_100_panoptic_separated",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 1 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..05816316f851690e60ee54b852b6f49ede73c886 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml @@ -0,0 +1,20 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "PanopticFPN" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + SEM_SEG_HEAD: + LOSS_WEIGHT: 0.5 +DATASETS: + TRAIN: ("coco_2017_val_panoptic_separated",) + TEST: ("coco_2017_val_panoptic_separated",) +SOLVER: + BASE_LR: 0.01 + WARMUP_FACTOR: 0.001 + WARMUP_ITERS: 500 + STEPS: (5500,) + MAX_ITER: 7000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 46.70, 1.1], ["segm", "AP", 38.73, 0.7], ["sem_seg", "mIoU", 64.73, 1.2], ["panoptic_seg", "PQ", 48.13, 0.8]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..36b998833bac04c830d5ab9f44d5773b0437ac0b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Detection/retinanet_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-Detection/retinanet_R_50_FPN_3x/137849486/model_final_4cafe0.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 44.36, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..8d95c1f614296716374686b22055a587ccd052b9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml @@ -0,0 +1,13 @@ +_BASE_: "../COCO-Detection/retinanet_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c7c3f908a9e80e98b2d25b6d384a60acaba9d4f8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Detection/rpn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/model_final_02ce48.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["box_proposals", "AR@1000", 58.16, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..402d432477507dc36f04c4a9777cb80fe06b2809 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml @@ -0,0 +1,13 @@ +_BASE_: "../COCO-Detection/rpn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + STEPS: (30,) + MAX_ITER: 40 + BASE_LR: 0.005 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bca74987d5218736983617883e0fe37f79d219b7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,10 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "SemanticSegmentor" + WEIGHTS: "detectron2://semantic_R_50_FPN_1x/111802073/model_final_c18079783c55a94968edc28b7101c5f0.pkl" + RESNETS: + DEPTH: 50 +DATASETS: + TEST: ("coco_2017_val_100_panoptic_stuffonly",) +TEST: + EXPECTED_RESULTS: [["sem_seg", "mIoU", 39.53, 0.02], ["sem_seg", "mACC", 51.50, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..14ab606f219b462fe37fcc7d5fbdbe65cb5c2642 --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "SemanticSegmentor" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +DATASETS: + TRAIN: ("coco_2017_val_100_panoptic_stuffonly",) + TEST: ("coco_2017_val_100_panoptic_stuffonly",) +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_training_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_training_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1f78d775889b11e9e76743de5ddb8139198edf61 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/configs/quick_schedules/semantic_R_50_FPN_training_acc_test.yaml @@ -0,0 +1,20 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "SemanticSegmentor" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +DATASETS: + TRAIN: ("coco_2017_val_panoptic_stuffonly",) + TEST: ("coco_2017_val_panoptic_stuffonly",) +SOLVER: + BASE_LR: 0.01 + WARMUP_FACTOR: 0.001 + WARMUP_ITERS: 300 + STEPS: (5500,) + MAX_ITER: 7000 +TEST: + EXPECTED_RESULTS: [["sem_seg", "mIoU", 76.51, 1.0], ["sem_seg", "mACC", 83.25, 1.0]] +INPUT: + # no scale augmentation + MIN_SIZE_TRAIN: (800, ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/README.md new file mode 100644 index 0000000000000000000000000000000000000000..caa755f6f0f472a04a419deec4a6acfdb949023b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/README.md @@ -0,0 +1,8 @@ + +## Detectron2 Demo + +We provide a command line tool to run a simple demo of builtin models. +The usage is explained in [GETTING_STARTED.md](../GETTING_STARTED.md). + +See our [blog post](https://ai.facebook.com/blog/-detectron2-a-pytorch-based-modular-object-detection-library-) +for a high-quality demo generated with this tool. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/demo.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/demo.py new file mode 100755 index 0000000000000000000000000000000000000000..1fd8df8f539cfe4a4f003fb820f49ffad0f54f80 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/demo.py @@ -0,0 +1,161 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import argparse +import glob +import multiprocessing as mp +import os +import time +import cv2 +import tqdm + +from detectron2.config import get_cfg +from detectron2.data.detection_utils import read_image +from detectron2.utils.logger import setup_logger + +from predictor import VisualizationDemo + +# constants +WINDOW_NAME = "COCO detections" + + +def setup_cfg(args): + # load config from file and command-line arguments + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + # Set score_threshold for builtin models + cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold + cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold + cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold + cfg.freeze() + return cfg + + +def get_parser(): + parser = argparse.ArgumentParser(description="Detectron2 demo for builtin models") + parser.add_argument( + "--config-file", + default="configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml", + metavar="FILE", + help="path to config file", + ) + parser.add_argument("--webcam", action="store_true", help="Take inputs from webcam.") + parser.add_argument("--video-input", help="Path to video file.") + parser.add_argument( + "--input", + nargs="+", + help="A list of space separated input images; " + "or a single glob pattern such as 'directory/*.jpg'", + ) + parser.add_argument( + "--output", + help="A file or directory to save output visualizations. " + "If not given, will show output in an OpenCV window.", + ) + + parser.add_argument( + "--confidence-threshold", + type=float, + default=0.5, + help="Minimum score for instance predictions to be shown", + ) + parser.add_argument( + "--opts", + help="Modify config options using the command-line 'KEY VALUE' pairs", + default=[], + nargs=argparse.REMAINDER, + ) + return parser + + +if __name__ == "__main__": + mp.set_start_method("spawn", force=True) + args = get_parser().parse_args() + setup_logger(name="fvcore") + logger = setup_logger() + logger.info("Arguments: " + str(args)) + + cfg = setup_cfg(args) + + demo = VisualizationDemo(cfg) + + if args.input: + if len(args.input) == 1: + args.input = glob.glob(os.path.expanduser(args.input[0])) + assert args.input, "The input path(s) was not found" + for path in tqdm.tqdm(args.input, disable=not args.output): + # use PIL, to be consistent with evaluation + img = read_image(path, format="BGR") + start_time = time.time() + predictions, visualized_output = demo.run_on_image(img) + logger.info( + "{}: {} in {:.2f}s".format( + path, + "detected {} instances".format(len(predictions["instances"])) + if "instances" in predictions + else "finished", + time.time() - start_time, + ) + ) + + if args.output: + if os.path.isdir(args.output): + assert os.path.isdir(args.output), args.output + out_filename = os.path.join(args.output, os.path.basename(path)) + else: + assert len(args.input) == 1, "Please specify a directory with args.output" + out_filename = args.output + visualized_output.save(out_filename) + else: + cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL) + cv2.imshow(WINDOW_NAME, visualized_output.get_image()[:, :, ::-1]) + if cv2.waitKey(0) == 27: + break # esc to quit + elif args.webcam: + assert args.input is None, "Cannot have both --input and --webcam!" + assert args.output is None, "output not yet supported with --webcam!" 
+ cam = cv2.VideoCapture(0) + for vis in tqdm.tqdm(demo.run_on_video(cam)): + cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL) + cv2.imshow(WINDOW_NAME, vis) + if cv2.waitKey(1) == 27: + break # esc to quit + cam.release() + cv2.destroyAllWindows() + elif args.video_input: + video = cv2.VideoCapture(args.video_input) + width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH)) + height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT)) + frames_per_second = video.get(cv2.CAP_PROP_FPS) + num_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT)) + basename = os.path.basename(args.video_input) + + if args.output: + if os.path.isdir(args.output): + output_fname = os.path.join(args.output, basename) + output_fname = os.path.splitext(output_fname)[0] + ".mkv" + else: + output_fname = args.output + assert not os.path.isfile(output_fname), output_fname + output_file = cv2.VideoWriter( + filename=output_fname, + # some installation of opencv may not support x264 (due to its license), + # you can try other format (e.g. MPEG) + fourcc=cv2.VideoWriter_fourcc(*"x264"), + fps=float(frames_per_second), + frameSize=(width, height), + isColor=True, + ) + assert os.path.isfile(args.video_input) + for vis_frame in tqdm.tqdm(demo.run_on_video(video), total=num_frames): + if args.output: + output_file.write(vis_frame) + else: + cv2.namedWindow(basename, cv2.WINDOW_NORMAL) + cv2.imshow(basename, vis_frame) + if cv2.waitKey(1) == 27: + break # esc to quit + video.release() + if args.output: + output_file.release() + else: + cv2.destroyAllWindows() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/predictor.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/predictor.py new file mode 100644 index 0000000000000000000000000000000000000000..689fa85436d928858e652df665f5e7460a1f3154 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/demo/predictor.py @@ -0,0 +1,220 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import atexit +import bisect +import multiprocessing as mp +from collections import deque +import cv2 +import torch + +from detectron2.data import MetadataCatalog +from detectron2.engine.defaults import DefaultPredictor +from detectron2.utils.video_visualizer import VideoVisualizer +from detectron2.utils.visualizer import ColorMode, Visualizer + + +class VisualizationDemo(object): + def __init__(self, cfg, instance_mode=ColorMode.IMAGE, parallel=False): + """ + Args: + cfg (CfgNode): + instance_mode (ColorMode): + parallel (bool): whether to run the model in different processes from visualization. + Useful since the visualization logic can be slow. + """ + self.metadata = MetadataCatalog.get( + cfg.DATASETS.TEST[0] if len(cfg.DATASETS.TEST) else "__unused" + ) + self.cpu_device = torch.device("cpu") + self.instance_mode = instance_mode + + self.parallel = parallel + if parallel: + num_gpu = torch.cuda.device_count() + self.predictor = AsyncPredictor(cfg, num_gpus=num_gpu) + else: + self.predictor = DefaultPredictor(cfg) + + def run_on_image(self, image): + """ + Args: + image (np.ndarray): an image of shape (H, W, C) (in BGR order). + This is the format used by OpenCV. + + Returns: + predictions (dict): the output of the model. + vis_output (VisImage): the visualized image output. + """ + vis_output = None + predictions = self.predictor(image) + # Convert image from OpenCV BGR format to Matplotlib RGB format. 
+ image = image[:, :, ::-1] + visualizer = Visualizer(image, self.metadata, instance_mode=self.instance_mode) + if "panoptic_seg" in predictions: + panoptic_seg, segments_info = predictions["panoptic_seg"] + vis_output = visualizer.draw_panoptic_seg_predictions( + panoptic_seg.to(self.cpu_device), segments_info + ) + else: + if "sem_seg" in predictions: + vis_output = visualizer.draw_sem_seg( + predictions["sem_seg"].argmax(dim=0).to(self.cpu_device) + ) + if "instances" in predictions: + instances = predictions["instances"].to(self.cpu_device) + vis_output = visualizer.draw_instance_predictions(predictions=instances) + + return predictions, vis_output + + def _frame_from_video(self, video): + while video.isOpened(): + success, frame = video.read() + if success: + yield frame + else: + break + + def run_on_video(self, video): + """ + Visualizes predictions on frames of the input video. + + Args: + video (cv2.VideoCapture): a :class:`VideoCapture` object, whose source can be + either a webcam or a video file. + + Yields: + ndarray: BGR visualizations of each video frame. + """ + video_visualizer = VideoVisualizer(self.metadata, self.instance_mode) + + def process_predictions(frame, predictions): + frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR) + if "panoptic_seg" in predictions: + panoptic_seg, segments_info = predictions["panoptic_seg"] + vis_frame = video_visualizer.draw_panoptic_seg_predictions( + frame, panoptic_seg.to(self.cpu_device), segments_info + ) + elif "instances" in predictions: + predictions = predictions["instances"].to(self.cpu_device) + vis_frame = video_visualizer.draw_instance_predictions(frame, predictions) + elif "sem_seg" in predictions: + vis_frame = video_visualizer.draw_sem_seg( + frame, predictions["sem_seg"].argmax(dim=0).to(self.cpu_device) + ) + + # Converts Matplotlib RGB format to OpenCV BGR format + vis_frame = cv2.cvtColor(vis_frame.get_image(), cv2.COLOR_RGB2BGR) + return vis_frame + + frame_gen = self._frame_from_video(video) + if self.parallel: + buffer_size = self.predictor.default_buffer_size + + frame_data = deque() + + for cnt, frame in enumerate(frame_gen): + frame_data.append(frame) + self.predictor.put(frame) + + if cnt >= buffer_size: + frame = frame_data.popleft() + predictions = self.predictor.get() + yield process_predictions(frame, predictions) + + while len(frame_data): + frame = frame_data.popleft() + predictions = self.predictor.get() + yield process_predictions(frame, predictions) + else: + for frame in frame_gen: + yield process_predictions(frame, self.predictor(frame)) + + +class AsyncPredictor: + """ + A predictor that runs the model asynchronously, possibly on >1 GPUs. + Because rendering the visualization takes considerably amount of time, + this helps improve throughput when rendering videos. 
+ """ + + class _StopToken: + pass + + class _PredictWorker(mp.Process): + def __init__(self, cfg, task_queue, result_queue): + self.cfg = cfg + self.task_queue = task_queue + self.result_queue = result_queue + super().__init__() + + def run(self): + predictor = DefaultPredictor(self.cfg) + + while True: + task = self.task_queue.get() + if isinstance(task, AsyncPredictor._StopToken): + break + idx, data = task + result = predictor(data) + self.result_queue.put((idx, result)) + + def __init__(self, cfg, num_gpus: int = 1): + """ + Args: + cfg (CfgNode): + num_gpus (int): if 0, will run on CPU + """ + num_workers = max(num_gpus, 1) + self.task_queue = mp.Queue(maxsize=num_workers * 3) + self.result_queue = mp.Queue(maxsize=num_workers * 3) + self.procs = [] + for gpuid in range(max(num_gpus, 1)): + cfg = cfg.clone() + cfg.defrost() + cfg.MODEL.DEVICE = "cuda:{}".format(gpuid) if num_gpus > 0 else "cpu" + self.procs.append( + AsyncPredictor._PredictWorker(cfg, self.task_queue, self.result_queue) + ) + + self.put_idx = 0 + self.get_idx = 0 + self.result_rank = [] + self.result_data = [] + + for p in self.procs: + p.start() + atexit.register(self.shutdown) + + def put(self, image): + self.put_idx += 1 + self.task_queue.put((self.put_idx, image)) + + def get(self): + self.get_idx += 1 # the index needed for this request + if len(self.result_rank) and self.result_rank[0] == self.get_idx: + res = self.result_data[0] + del self.result_data[0], self.result_rank[0] + return res + + while True: + # make sure the results are returned in the correct order + idx, res = self.result_queue.get() + if idx == self.get_idx: + return res + insert = bisect.bisect(self.result_rank, idx) + self.result_rank.insert(insert, idx) + self.result_data.insert(insert, res) + + def __len__(self): + return self.put_idx - self.get_idx + + def __call__(self, image): + self.put(image) + return self.get() + + def shutdown(self): + for _ in self.procs: + self.task_queue.put(AsyncPredictor._StopToken()) + + @property + def default_buffer_size(self): + return len(self.procs) * 5 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..41816af2e8e538fa2ef4dc7b34f5667e0e823b90 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +from .utils.env import setup_environment + +setup_environment() + + +# This line will be programatically read/write by setup.py. +# Leave them at the bottom of this file and don't touch them. +__version__ = "0.1.3" diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e17a9df03d886b379ffbb1c4ec41e03c5025410f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# File: + + +from . 
import catalog as _UNUSED # register the handler +from .detection_checkpoint import DetectionCheckpointer +from fvcore.common.checkpoint import Checkpointer, PeriodicCheckpointer + +__all__ = ["Checkpointer", "PeriodicCheckpointer", "DetectionCheckpointer"] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/c2_model_loading.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/c2_model_loading.py new file mode 100644 index 0000000000000000000000000000000000000000..e27ba8463c744438d44f04f23fd4975525eba667 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/c2_model_loading.py @@ -0,0 +1,313 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import logging +import re +import torch +from fvcore.common.checkpoint import ( + get_missing_parameters_message, + get_unexpected_parameters_message, +) + + +def convert_basic_c2_names(original_keys): + """ + Apply some basic name conversion to names in C2 weights. + It only deals with typical backbone models. + + Args: + original_keys (list[str]): + Returns: + list[str]: The same number of strings matching those in original_keys. + """ + layer_keys = copy.deepcopy(original_keys) + layer_keys = [ + {"pred_b": "linear_b", "pred_w": "linear_w"}.get(k, k) for k in layer_keys + ] # some hard-coded mappings + + layer_keys = [k.replace("_", ".") for k in layer_keys] + layer_keys = [re.sub("\\.b$", ".bias", k) for k in layer_keys] + layer_keys = [re.sub("\\.w$", ".weight", k) for k in layer_keys] + # Uniform both bn and gn names to "norm" + layer_keys = [re.sub("bn\\.s$", "norm.weight", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.bias$", "norm.bias", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.rm", "norm.running_mean", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.running.mean$", "norm.running_mean", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.riv$", "norm.running_var", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.running.var$", "norm.running_var", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.gamma$", "norm.weight", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.beta$", "norm.bias", k) for k in layer_keys] + layer_keys = [re.sub("gn\\.s$", "norm.weight", k) for k in layer_keys] + layer_keys = [re.sub("gn\\.bias$", "norm.bias", k) for k in layer_keys] + + # stem + layer_keys = [re.sub("^res\\.conv1\\.norm\\.", "conv1.norm.", k) for k in layer_keys] + # to avoid mis-matching with "conv1" in other components (e.g. 
detection head) + layer_keys = [re.sub("^conv1\\.", "stem.conv1.", k) for k in layer_keys] + + # layer1-4 is used by torchvision, however we follow the C2 naming strategy (res2-5) + # layer_keys = [re.sub("^res2.", "layer1.", k) for k in layer_keys] + # layer_keys = [re.sub("^res3.", "layer2.", k) for k in layer_keys] + # layer_keys = [re.sub("^res4.", "layer3.", k) for k in layer_keys] + # layer_keys = [re.sub("^res5.", "layer4.", k) for k in layer_keys] + + # blocks + layer_keys = [k.replace(".branch1.", ".shortcut.") for k in layer_keys] + layer_keys = [k.replace(".branch2a.", ".conv1.") for k in layer_keys] + layer_keys = [k.replace(".branch2b.", ".conv2.") for k in layer_keys] + layer_keys = [k.replace(".branch2c.", ".conv3.") for k in layer_keys] + + # DensePose substitutions + layer_keys = [re.sub("^body.conv.fcn", "body_conv_fcn", k) for k in layer_keys] + layer_keys = [k.replace("AnnIndex.lowres", "ann_index_lowres") for k in layer_keys] + layer_keys = [k.replace("Index.UV.lowres", "index_uv_lowres") for k in layer_keys] + layer_keys = [k.replace("U.lowres", "u_lowres") for k in layer_keys] + layer_keys = [k.replace("V.lowres", "v_lowres") for k in layer_keys] + return layer_keys + + +def convert_c2_detectron_names(weights): + """ + Map Caffe2 Detectron weight names to Detectron2 names. + + Args: + weights (dict): name -> tensor + + Returns: + dict: detectron2 names -> tensor + dict: detectron2 names -> C2 names + """ + logger = logging.getLogger(__name__) + logger.info("Remapping C2 weights ......") + original_keys = sorted(weights.keys()) + layer_keys = copy.deepcopy(original_keys) + + layer_keys = convert_basic_c2_names(layer_keys) + + # -------------------------------------------------------------------------- + # RPN hidden representation conv + # -------------------------------------------------------------------------- + # FPN case + # In the C2 model, the RPN hidden layer conv is defined for FPN level 2 and then + # shared for all other levels, hence the appearance of "fpn2" + layer_keys = [ + k.replace("conv.rpn.fpn2", "proposal_generator.rpn_head.conv") for k in layer_keys + ] + # Non-FPN case + layer_keys = [k.replace("conv.rpn", "proposal_generator.rpn_head.conv") for k in layer_keys] + + # -------------------------------------------------------------------------- + # RPN box transformation conv + # -------------------------------------------------------------------------- + # FPN case (see note above about "fpn2") + layer_keys = [ + k.replace("rpn.bbox.pred.fpn2", "proposal_generator.rpn_head.anchor_deltas") + for k in layer_keys + ] + layer_keys = [ + k.replace("rpn.cls.logits.fpn2", "proposal_generator.rpn_head.objectness_logits") + for k in layer_keys + ] + # Non-FPN case + layer_keys = [ + k.replace("rpn.bbox.pred", "proposal_generator.rpn_head.anchor_deltas") for k in layer_keys + ] + layer_keys = [ + k.replace("rpn.cls.logits", "proposal_generator.rpn_head.objectness_logits") + for k in layer_keys + ] + + # -------------------------------------------------------------------------- + # Fast R-CNN box head + # -------------------------------------------------------------------------- + layer_keys = [re.sub("^bbox\\.pred", "bbox_pred", k) for k in layer_keys] + layer_keys = [re.sub("^cls\\.score", "cls_score", k) for k in layer_keys] + layer_keys = [re.sub("^fc6\\.", "box_head.fc1.", k) for k in layer_keys] + layer_keys = [re.sub("^fc7\\.", "box_head.fc2.", k) for k in layer_keys] + # 4conv1fc head tensor names: head_conv1_w, head_conv1_gn_s + layer_keys = 
[re.sub("^head\\.conv", "box_head.conv", k) for k in layer_keys] + + # -------------------------------------------------------------------------- + # FPN lateral and output convolutions + # -------------------------------------------------------------------------- + def fpn_map(name): + """ + Look for keys with the following patterns: + 1) Starts with "fpn.inner." + Example: "fpn.inner.res2.2.sum.lateral.weight" + Meaning: These are lateral pathway convolutions + 2) Starts with "fpn.res" + Example: "fpn.res2.2.sum.weight" + Meaning: These are FPN output convolutions + """ + splits = name.split(".") + norm = ".norm" if "norm" in splits else "" + if name.startswith("fpn.inner."): + # splits example: ['fpn', 'inner', 'res2', '2', 'sum', 'lateral', 'weight'] + stage = int(splits[2][len("res") :]) + return "fpn_lateral{}{}.{}".format(stage, norm, splits[-1]) + elif name.startswith("fpn.res"): + # splits example: ['fpn', 'res2', '2', 'sum', 'weight'] + stage = int(splits[1][len("res") :]) + return "fpn_output{}{}.{}".format(stage, norm, splits[-1]) + return name + + layer_keys = [fpn_map(k) for k in layer_keys] + + # -------------------------------------------------------------------------- + # Mask R-CNN mask head + # -------------------------------------------------------------------------- + # roi_heads.StandardROIHeads case + layer_keys = [k.replace(".[mask].fcn", "mask_head.mask_fcn") for k in layer_keys] + layer_keys = [re.sub("^\\.mask\\.fcn", "mask_head.mask_fcn", k) for k in layer_keys] + layer_keys = [k.replace("mask.fcn.logits", "mask_head.predictor") for k in layer_keys] + # roi_heads.Res5ROIHeads case + layer_keys = [k.replace("conv5.mask", "mask_head.deconv") for k in layer_keys] + + # -------------------------------------------------------------------------- + # Keypoint R-CNN head + # -------------------------------------------------------------------------- + # interestingly, the keypoint head convs have blob names that are simply "conv_fcnX" + layer_keys = [k.replace("conv.fcn", "roi_heads.keypoint_head.conv_fcn") for k in layer_keys] + layer_keys = [ + k.replace("kps.score.lowres", "roi_heads.keypoint_head.score_lowres") for k in layer_keys + ] + layer_keys = [k.replace("kps.score.", "roi_heads.keypoint_head.score.") for k in layer_keys] + + # -------------------------------------------------------------------------- + # Done with replacements + # -------------------------------------------------------------------------- + assert len(set(layer_keys)) == len(layer_keys) + assert len(original_keys) == len(layer_keys) + + new_weights = {} + new_keys_to_original_keys = {} + for orig, renamed in zip(original_keys, layer_keys): + new_keys_to_original_keys[renamed] = orig + if renamed.startswith("bbox_pred.") or renamed.startswith("mask_head.predictor."): + # remove the meaningless prediction weight for background class + new_start_idx = 4 if renamed.startswith("bbox_pred.") else 1 + new_weights[renamed] = weights[orig][new_start_idx:] + logger.info( + "Remove prediction weight for background class in {}. 
The shape changes from " + "{} to {}.".format( + renamed, tuple(weights[orig].shape), tuple(new_weights[renamed].shape) + ) + ) + elif renamed.startswith("cls_score."): + # move weights of bg class from original index 0 to last index + logger.info( + "Move classification weights for background class in {} from index 0 to " + "index {}.".format(renamed, weights[orig].shape[0] - 1) + ) + new_weights[renamed] = torch.cat([weights[orig][1:], weights[orig][:1]]) + else: + new_weights[renamed] = weights[orig] + + return new_weights, new_keys_to_original_keys + + +# Note the current matching is not symmetric. +# it assumes model_state_dict will have longer names. +def align_and_update_state_dicts(model_state_dict, ckpt_state_dict, c2_conversion=True): + """ + Match names between the two state-dict, and update the values of model_state_dict in-place with + copies of the matched tensor in ckpt_state_dict. + If `c2_conversion==True`, `ckpt_state_dict` is assumed to be a Caffe2 + model and will be renamed at first. + + Strategy: suppose that the models that we will create will have prefixes appended + to each of its keys, for example due to an extra level of nesting that the original + pre-trained weights from ImageNet won't contain. For example, model.state_dict() + might return backbone[0].body.res2.conv1.weight, while the pre-trained model contains + res2.conv1.weight. We thus want to match both parameters together. + For that, we look for each model weight, look among all loaded keys if there is one + that is a suffix of the current weight name, and use it if that's the case. + If multiple matches exist, take the one with longest size + of the corresponding name. For example, for the same model as before, the pretrained + weight file can contain both res2.conv1.weight, as well as conv1.weight. In this case, + we want to match backbone[0].body.conv1.weight to conv1.weight, and + backbone[0].body.res2.conv1.weight to res2.conv1.weight. + """ + model_keys = sorted(model_state_dict.keys()) + if c2_conversion: + ckpt_state_dict, original_keys = convert_c2_detectron_names(ckpt_state_dict) + # original_keys: the name in the original dict (before renaming) + else: + original_keys = {x: x for x in ckpt_state_dict.keys()} + ckpt_keys = sorted(ckpt_state_dict.keys()) + + def match(a, b): + # Matched ckpt_key should be a complete (starts with '.') suffix. + # For example, roi_heads.mesh_head.whatever_conv1 does not match conv1, + # but matches whatever_conv1 or mesh_head.whatever_conv1. + return a == b or a.endswith("." 
+ b) + + # get a matrix of string matches, where each (i, j) entry correspond to the size of the + # ckpt_key string, if it matches + match_matrix = [len(j) if match(i, j) else 0 for i in model_keys for j in ckpt_keys] + match_matrix = torch.as_tensor(match_matrix).view(len(model_keys), len(ckpt_keys)) + # use the matched one with longest size in case of multiple matches + max_match_size, idxs = match_matrix.max(1) + # remove indices that correspond to no-match + idxs[max_match_size == 0] = -1 + + # used for logging + max_len_model = max(len(key) for key in model_keys) if model_keys else 1 + max_len_ckpt = max(len(key) for key in ckpt_keys) if ckpt_keys else 1 + log_str_template = "{: <{}} loaded from {: <{}} of shape {}" + logger = logging.getLogger(__name__) + # matched_pairs (matched checkpoint key --> matched model key) + matched_keys = {} + for idx_model, idx_ckpt in enumerate(idxs.tolist()): + if idx_ckpt == -1: + continue + key_model = model_keys[idx_model] + key_ckpt = ckpt_keys[idx_ckpt] + value_ckpt = ckpt_state_dict[key_ckpt] + shape_in_model = model_state_dict[key_model].shape + + if shape_in_model != value_ckpt.shape: + logger.warning( + "Shape of {} in checkpoint is {}, while shape of {} in model is {}.".format( + key_ckpt, value_ckpt.shape, key_model, shape_in_model + ) + ) + logger.warning( + "{} will not be loaded. Please double check and see if this is desired.".format( + key_ckpt + ) + ) + continue + + model_state_dict[key_model] = value_ckpt.clone() + if key_ckpt in matched_keys: # already added to matched_keys + logger.error( + "Ambiguity found for {} in checkpoint!" + "It matches at least two keys in the model ({} and {}).".format( + key_ckpt, key_model, matched_keys[key_ckpt] + ) + ) + raise ValueError("Cannot match one checkpoint key to multiple keys in the model.") + + matched_keys[key_ckpt] = key_model + logger.info( + log_str_template.format( + key_model, + max_len_model, + original_keys[key_ckpt], + max_len_ckpt, + tuple(shape_in_model), + ) + ) + matched_model_keys = matched_keys.values() + matched_ckpt_keys = matched_keys.keys() + # print warnings about unmatched keys on both side + unmatched_model_keys = [k for k in model_keys if k not in matched_model_keys] + if len(unmatched_model_keys): + logger.info(get_missing_parameters_message(unmatched_model_keys)) + + unmatched_ckpt_keys = [k for k in ckpt_keys if k not in matched_ckpt_keys] + if len(unmatched_ckpt_keys): + logger.info( + get_unexpected_parameters_message(original_keys[x] for x in unmatched_ckpt_keys) + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/catalog.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/catalog.py new file mode 100644 index 0000000000000000000000000000000000000000..62f81f3c1531e2726400cba4c97b60d744670da5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/catalog.py @@ -0,0 +1,134 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +from fvcore.common.file_io import PathHandler, PathManager + + +class ModelCatalog(object): + """ + Store mappings from names to third-party models. + """ + + S3_C2_DETECTRON_PREFIX = "https://dl.fbaipublicfiles.com/detectron" + + # MSRA models have STRIDE_IN_1X1=True. False otherwise. + # NOTE: all BN models here have fused BN into an affine layer. + # As a result, you should only load them to a model with "FrozenBN". 
+ # Loading them to a model with regular BN or SyncBN is wrong. + # Even when loaded to FrozenBN, it is still different from affine by an epsilon, + # which should be negligible for training. + # NOTE: all models here uses PIXEL_STD=[1,1,1] + # NOTE: Most of the BN models here are no longer used. We use the + # re-converted pre-trained models under detectron2 model zoo instead. + C2_IMAGENET_MODELS = { + "MSRA/R-50": "ImageNetPretrained/MSRA/R-50.pkl", + "MSRA/R-101": "ImageNetPretrained/MSRA/R-101.pkl", + "FAIR/R-50-GN": "ImageNetPretrained/47261647/R-50-GN.pkl", + "FAIR/R-101-GN": "ImageNetPretrained/47592356/R-101-GN.pkl", + "FAIR/X-101-32x8d": "ImageNetPretrained/20171220/X-101-32x8d.pkl", + "FAIR/X-101-64x4d": "ImageNetPretrained/FBResNeXt/X-101-64x4d.pkl", + "FAIR/X-152-32x8d-IN5k": "ImageNetPretrained/25093814/X-152-32x8d-IN5k.pkl", + } + + C2_DETECTRON_PATH_FORMAT = ( + "{prefix}/{url}/output/train/{dataset}/{type}/model_final.pkl" # noqa B950 + ) + + C2_DATASET_COCO = "coco_2014_train%3Acoco_2014_valminusminival" + C2_DATASET_COCO_KEYPOINTS = "keypoints_coco_2014_train%3Akeypoints_coco_2014_valminusminival" + + # format: {model_name} -> part of the url + C2_DETECTRON_MODELS = { + "35857197/e2e_faster_rcnn_R-50-C4_1x": "35857197/12_2017_baselines/e2e_faster_rcnn_R-50-C4_1x.yaml.01_33_49.iAX0mXvW", # noqa B950 + "35857345/e2e_faster_rcnn_R-50-FPN_1x": "35857345/12_2017_baselines/e2e_faster_rcnn_R-50-FPN_1x.yaml.01_36_30.cUF7QR7I", # noqa B950 + "35857890/e2e_faster_rcnn_R-101-FPN_1x": "35857890/12_2017_baselines/e2e_faster_rcnn_R-101-FPN_1x.yaml.01_38_50.sNxI7sX7", # noqa B950 + "36761737/e2e_faster_rcnn_X-101-32x8d-FPN_1x": "36761737/12_2017_baselines/e2e_faster_rcnn_X-101-32x8d-FPN_1x.yaml.06_31_39.5MIHi1fZ", # noqa B950 + "35858791/e2e_mask_rcnn_R-50-C4_1x": "35858791/12_2017_baselines/e2e_mask_rcnn_R-50-C4_1x.yaml.01_45_57.ZgkA7hPB", # noqa B950 + "35858933/e2e_mask_rcnn_R-50-FPN_1x": "35858933/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml.01_48_14.DzEQe4wC", # noqa B950 + "35861795/e2e_mask_rcnn_R-101-FPN_1x": "35861795/12_2017_baselines/e2e_mask_rcnn_R-101-FPN_1x.yaml.02_31_37.KqyEK4tT", # noqa B950 + "36761843/e2e_mask_rcnn_X-101-32x8d-FPN_1x": "36761843/12_2017_baselines/e2e_mask_rcnn_X-101-32x8d-FPN_1x.yaml.06_35_59.RZotkLKI", # noqa B950 + "48616381/e2e_mask_rcnn_R-50-FPN_2x_gn": "GN/48616381/04_2018_gn_baselines/e2e_mask_rcnn_R-50-FPN_2x_gn_0416.13_23_38.bTlTI97Q", # noqa B950 + "37697547/e2e_keypoint_rcnn_R-50-FPN_1x": "37697547/12_2017_baselines/e2e_keypoint_rcnn_R-50-FPN_1x.yaml.08_42_54.kdzV35ao", # noqa B950 + "35998355/rpn_R-50-C4_1x": "35998355/12_2017_baselines/rpn_R-50-C4_1x.yaml.08_00_43.njH5oD9L", # noqa B950 + "35998814/rpn_R-50-FPN_1x": "35998814/12_2017_baselines/rpn_R-50-FPN_1x.yaml.08_06_03.Axg0r179", # noqa B950 + "36225147/fast_R-50-FPN_1x": "36225147/12_2017_baselines/fast_rcnn_R-50-FPN_1x.yaml.08_39_09.L3obSdQ2", # noqa B950 + } + + @staticmethod + def get(name): + if name.startswith("Caffe2Detectron/COCO"): + return ModelCatalog._get_c2_detectron_baseline(name) + if name.startswith("ImageNetPretrained/"): + return ModelCatalog._get_c2_imagenet_pretrained(name) + raise RuntimeError("model not present in the catalog: {}".format(name)) + + @staticmethod + def _get_c2_imagenet_pretrained(name): + prefix = ModelCatalog.S3_C2_DETECTRON_PREFIX + name = name[len("ImageNetPretrained/") :] + name = ModelCatalog.C2_IMAGENET_MODELS[name] + url = "/".join([prefix, name]) + return url + + @staticmethod + def _get_c2_detectron_baseline(name): + name = 
name[len("Caffe2Detectron/COCO/") :] + url = ModelCatalog.C2_DETECTRON_MODELS[name] + if "keypoint_rcnn" in name: + dataset = ModelCatalog.C2_DATASET_COCO_KEYPOINTS + else: + dataset = ModelCatalog.C2_DATASET_COCO + + if "35998355/rpn_R-50-C4_1x" in name: + # this one model is somehow different from others .. + type = "rpn" + else: + type = "generalized_rcnn" + + # Detectron C2 models are stored in the structure defined in `C2_DETECTRON_PATH_FORMAT`. + url = ModelCatalog.C2_DETECTRON_PATH_FORMAT.format( + prefix=ModelCatalog.S3_C2_DETECTRON_PREFIX, url=url, type=type, dataset=dataset + ) + return url + + +class ModelCatalogHandler(PathHandler): + """ + Resolve URL like catalog://. + """ + + PREFIX = "catalog://" + + def _get_supported_prefixes(self): + return [self.PREFIX] + + def _get_local_path(self, path): + logger = logging.getLogger(__name__) + catalog_path = ModelCatalog.get(path[len(self.PREFIX) :]) + logger.info("Catalog entry {} points to {}".format(path, catalog_path)) + return PathManager.get_local_path(catalog_path) + + def _open(self, path, mode="r", **kwargs): + return PathManager.open(self._get_local_path(path), mode, **kwargs) + + +class Detectron2Handler(PathHandler): + """ + Resolve anything that's in Detectron2 model zoo. + """ + + PREFIX = "detectron2://" + S3_DETECTRON2_PREFIX = "https://dl.fbaipublicfiles.com/detectron2/" + + def _get_supported_prefixes(self): + return [self.PREFIX] + + def _get_local_path(self, path): + name = path[len(self.PREFIX) :] + return PathManager.get_local_path(self.S3_DETECTRON2_PREFIX + name) + + def _open(self, path, mode="r", **kwargs): + return PathManager.open(self._get_local_path(path), mode, **kwargs) + + +PathManager.register_handler(ModelCatalogHandler()) +PathManager.register_handler(Detectron2Handler()) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/detection_checkpoint.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/detection_checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..06e6739f7b2070cf3e2d34099188e5ea1f7cf622 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/checkpoint/detection_checkpoint.py @@ -0,0 +1,73 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import pickle +from fvcore.common.checkpoint import Checkpointer +from fvcore.common.file_io import PathManager + +import detectron2.utils.comm as comm + +from .c2_model_loading import align_and_update_state_dicts + + +class DetectionCheckpointer(Checkpointer): + """ + Same as :class:`Checkpointer`, but is able to handle models in detectron & detectron2 + model zoo, and apply conversions for legacy models. 
+ """ + + def __init__(self, model, save_dir="", *, save_to_disk=None, **checkpointables): + is_main_process = comm.is_main_process() + super().__init__( + model, + save_dir, + save_to_disk=is_main_process if save_to_disk is None else save_to_disk, + **checkpointables, + ) + + def _load_file(self, filename): + if filename.endswith(".pkl"): + with PathManager.open(filename, "rb") as f: + data = pickle.load(f, encoding="latin1") + if "model" in data and "__author__" in data: + # file is in Detectron2 model zoo format + self.logger.info("Reading a file from '{}'".format(data["__author__"])) + return data + else: + # assume file is from Caffe2 / Detectron1 model zoo + if "blobs" in data: + # Detection models have "blobs", but ImageNet models don't + data = data["blobs"] + data = {k: v for k, v in data.items() if not k.endswith("_momentum")} + return {"model": data, "__author__": "Caffe2", "matching_heuristics": True} + + loaded = super()._load_file(filename) # load native pth checkpoint + if "model" not in loaded: + loaded = {"model": loaded} + return loaded + + def _load_model(self, checkpoint): + if checkpoint.get("matching_heuristics", False): + self._convert_ndarray_to_tensor(checkpoint["model"]) + # convert weights by name-matching heuristics + model_state_dict = self.model.state_dict() + align_and_update_state_dicts( + model_state_dict, + checkpoint["model"], + c2_conversion=checkpoint.get("__author__", None) == "Caffe2", + ) + checkpoint["model"] = model_state_dict + # for non-caffe2 models, use standard ways to load it + incompatible = super()._load_model(checkpoint) + if incompatible is None: # support older versions of fvcore + return None + + model_buffers = dict(self.model.named_buffers(recurse=False)) + for k in ["pixel_mean", "pixel_std"]: + # Ignore missing key message about pixel_mean/std. + # Though they may be missing in old checkpoints, they will be correctly + # initialized from config anyway. + if k in model_buffers: + try: + incompatible.missing_keys.remove(k) + except ValueError: + pass + return incompatible diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f996ecd74947c504f86e3e6854a45bd74ad32c1c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .compat import downgrade_config, upgrade_config +from .config import CfgNode, get_cfg, global_cfg, set_global_cfg, configurable + +__all__ = [ + "CfgNode", + "get_cfg", + "global_cfg", + "set_global_cfg", + "downgrade_config", + "upgrade_config", + "configurable", +] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/compat.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..41fe3a00ca05885abf28106808fe7f8d862b5036 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/compat.py @@ -0,0 +1,229 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Backward compatibility of configs. + +Instructions to bump version: ++ It's not needed to bump version if new keys are added. 
+ It's only needed when backward-incompatible changes happen + (i.e., some existing keys disappear, or the meaning of a key changes) ++ To bump version, do the following: + 1. Increment _C.VERSION in defaults.py + 2. Add a converter in this file. + + Each ConverterVX has a function "upgrade" which in-place upgrades config from X-1 to X, + and a function "downgrade" which in-place downgrades config from X to X-1 + + In each function, VERSION is left unchanged. + + Each converter assumes that its input has the relevant keys + (i.e., the input is not a partial config). + 3. Run the tests (test_config.py) to make sure the upgrade & downgrade + functions are consistent. +""" + +import logging +from typing import List, Optional, Tuple + +from .config import CfgNode as CN +from .defaults import _C + +__all__ = ["upgrade_config", "downgrade_config"] + + +def upgrade_config(cfg: CN, to_version: Optional[int] = None) -> CN: + """ + Upgrade a config from its current version to a newer version. + + Args: + cfg (CfgNode): + to_version (int): defaults to the latest version. + """ + cfg = cfg.clone() + if to_version is None: + to_version = _C.VERSION + + assert cfg.VERSION <= to_version, "Cannot upgrade from v{} to v{}!".format( + cfg.VERSION, to_version + ) + for k in range(cfg.VERSION, to_version): + converter = globals()["ConverterV" + str(k + 1)] + converter.upgrade(cfg) + cfg.VERSION = k + 1 + return cfg + + +def downgrade_config(cfg: CN, to_version: int) -> CN: + """ + Downgrade a config from its current version to an older version. + + Args: + cfg (CfgNode): + to_version (int): + + Note: + A general downgrade of arbitrary configs is not always possible due to the + different functionalities in different versions. + The purpose of downgrade is only to recover the defaults in old versions, + allowing it to load an old partial yaml config. + Therefore, the implementation only needs to fill in the default values + in the old version when a general downgrade is not possible. + """ + cfg = cfg.clone() + assert cfg.VERSION >= to_version, "Cannot downgrade from v{} to v{}!".format( + cfg.VERSION, to_version + ) + for k in range(cfg.VERSION, to_version, -1): + converter = globals()["ConverterV" + str(k)] + converter.downgrade(cfg) + cfg.VERSION = k - 1 + return cfg + + +def guess_version(cfg: CN, filename: str) -> int: + """ + Guess the version of a partial config where the VERSION field is not specified. + Returns the version, or the latest if cannot make a guess. + + This makes it easier for users to migrate. + """ + logger = logging.getLogger(__name__) + + def _has(name: str) -> bool: + cur = cfg + for n in name.split("."): + if n not in cur: + return False + cur = cur[n] + return True + + # Most users' partial configs have "MODEL.WEIGHT", so guess on it + ret = None + if _has("MODEL.WEIGHT") or _has("TEST.AUG_ON"): + ret = 1 + + if ret is not None: + logger.warning("Config '{}' has no VERSION. Assuming it to be v{}.".format(filename, ret)) + else: + ret = _C.VERSION + logger.warning( + "Config '{}' has no VERSION. 
Assuming it to be compatible with latest v{}.".format( + filename, ret + ) + ) + return ret + + +def _rename(cfg: CN, old: str, new: str) -> None: + old_keys = old.split(".") + new_keys = new.split(".") + + def _set(key_seq: List[str], val: str) -> None: + cur = cfg + for k in key_seq[:-1]: + if k not in cur: + cur[k] = CN() + cur = cur[k] + cur[key_seq[-1]] = val + + def _get(key_seq: List[str]) -> CN: + cur = cfg + for k in key_seq: + cur = cur[k] + return cur + + def _del(key_seq: List[str]) -> None: + cur = cfg + for k in key_seq[:-1]: + cur = cur[k] + del cur[key_seq[-1]] + if len(cur) == 0 and len(key_seq) > 1: + _del(key_seq[:-1]) + + _set(new_keys, _get(old_keys)) + _del(old_keys) + + +class _RenameConverter: + """ + A converter that handles simple rename. + """ + + RENAME: List[Tuple[str, str]] = [] # list of tuples of (old name, new name) + + @classmethod + def upgrade(cls, cfg: CN) -> None: + for old, new in cls.RENAME: + _rename(cfg, old, new) + + @classmethod + def downgrade(cls, cfg: CN) -> None: + for old, new in cls.RENAME[::-1]: + _rename(cfg, new, old) + + +class ConverterV1(_RenameConverter): + RENAME = [("MODEL.RPN_HEAD.NAME", "MODEL.RPN.HEAD_NAME")] + + +class ConverterV2(_RenameConverter): + """ + A large bulk of rename, before public release. + """ + + RENAME = [ + ("MODEL.WEIGHT", "MODEL.WEIGHTS"), + ("MODEL.PANOPTIC_FPN.SEMANTIC_LOSS_SCALE", "MODEL.SEM_SEG_HEAD.LOSS_WEIGHT"), + ("MODEL.PANOPTIC_FPN.RPN_LOSS_SCALE", "MODEL.RPN.LOSS_WEIGHT"), + ("MODEL.PANOPTIC_FPN.INSTANCE_LOSS_SCALE", "MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT"), + ("MODEL.PANOPTIC_FPN.COMBINE_ON", "MODEL.PANOPTIC_FPN.COMBINE.ENABLED"), + ( + "MODEL.PANOPTIC_FPN.COMBINE_OVERLAP_THRESHOLD", + "MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH", + ), + ( + "MODEL.PANOPTIC_FPN.COMBINE_STUFF_AREA_LIMIT", + "MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT", + ), + ( + "MODEL.PANOPTIC_FPN.COMBINE_INSTANCES_CONFIDENCE_THRESHOLD", + "MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH", + ), + ("MODEL.ROI_HEADS.SCORE_THRESH", "MODEL.ROI_HEADS.SCORE_THRESH_TEST"), + ("MODEL.ROI_HEADS.NMS", "MODEL.ROI_HEADS.NMS_THRESH_TEST"), + ("MODEL.RETINANET.INFERENCE_SCORE_THRESHOLD", "MODEL.RETINANET.SCORE_THRESH_TEST"), + ("MODEL.RETINANET.INFERENCE_TOPK_CANDIDATES", "MODEL.RETINANET.TOPK_CANDIDATES_TEST"), + ("MODEL.RETINANET.INFERENCE_NMS_THRESHOLD", "MODEL.RETINANET.NMS_THRESH_TEST"), + ("TEST.DETECTIONS_PER_IMG", "TEST.DETECTIONS_PER_IMAGE"), + ("TEST.AUG_ON", "TEST.AUG.ENABLED"), + ("TEST.AUG_MIN_SIZES", "TEST.AUG.MIN_SIZES"), + ("TEST.AUG_MAX_SIZE", "TEST.AUG.MAX_SIZE"), + ("TEST.AUG_FLIP", "TEST.AUG.FLIP"), + ] + + @classmethod + def upgrade(cls, cfg: CN) -> None: + super().upgrade(cfg) + + if cfg.MODEL.META_ARCHITECTURE == "RetinaNet": + _rename( + cfg, "MODEL.RETINANET.ANCHOR_ASPECT_RATIOS", "MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS" + ) + _rename(cfg, "MODEL.RETINANET.ANCHOR_SIZES", "MODEL.ANCHOR_GENERATOR.SIZES") + del cfg["MODEL"]["RPN"]["ANCHOR_SIZES"] + del cfg["MODEL"]["RPN"]["ANCHOR_ASPECT_RATIOS"] + else: + _rename(cfg, "MODEL.RPN.ANCHOR_ASPECT_RATIOS", "MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS") + _rename(cfg, "MODEL.RPN.ANCHOR_SIZES", "MODEL.ANCHOR_GENERATOR.SIZES") + del cfg["MODEL"]["RETINANET"]["ANCHOR_SIZES"] + del cfg["MODEL"]["RETINANET"]["ANCHOR_ASPECT_RATIOS"] + del cfg["MODEL"]["RETINANET"]["ANCHOR_STRIDES"] + + @classmethod + def downgrade(cls, cfg: CN) -> None: + super().downgrade(cfg) + + _rename(cfg, "MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS", "MODEL.RPN.ANCHOR_ASPECT_RATIOS") + _rename(cfg, 
"MODEL.ANCHOR_GENERATOR.SIZES", "MODEL.RPN.ANCHOR_SIZES") + cfg.MODEL.RETINANET.ANCHOR_ASPECT_RATIOS = cfg.MODEL.RPN.ANCHOR_ASPECT_RATIOS + cfg.MODEL.RETINANET.ANCHOR_SIZES = cfg.MODEL.RPN.ANCHOR_SIZES + cfg.MODEL.RETINANET.ANCHOR_STRIDES = [] # this is not used anywhere in any version diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/config.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/config.py new file mode 100644 index 0000000000000000000000000000000000000000..14ad524f00e706ddba567a62f805481c2f185a8e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/config.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import functools +import inspect +import logging +from fvcore.common.config import CfgNode as _CfgNode +from fvcore.common.file_io import PathManager + + +class CfgNode(_CfgNode): + """ + The same as `fvcore.common.config.CfgNode`, but different in: + + 1. Use unsafe yaml loading by default. + Note that this may lead to arbitrary code execution: you must not + load a config file from untrusted sources before manually inspecting + the content of the file. + 2. Support config versioning. + When attempting to merge an old config, it will convert the old config automatically. + """ + + # Note that the default value of allow_unsafe is changed to True + def merge_from_file(self, cfg_filename: str, allow_unsafe: bool = True) -> None: + assert PathManager.isfile(cfg_filename), f"Config file '{cfg_filename}' does not exist!" + loaded_cfg = _CfgNode.load_yaml_with_base(cfg_filename, allow_unsafe=allow_unsafe) + loaded_cfg = type(self)(loaded_cfg) + + # defaults.py needs to import CfgNode + from .defaults import _C + + latest_ver = _C.VERSION + assert ( + latest_ver == self.VERSION + ), "CfgNode.merge_from_file is only allowed on a config object of latest version!" + + logger = logging.getLogger(__name__) + + loaded_ver = loaded_cfg.get("VERSION", None) + if loaded_ver is None: + from .compat import guess_version + + loaded_ver = guess_version(loaded_cfg, cfg_filename) + assert loaded_ver <= self.VERSION, "Cannot merge a v{} config into a v{} config.".format( + loaded_ver, self.VERSION + ) + + if loaded_ver == self.VERSION: + self.merge_from_other_cfg(loaded_cfg) + else: + # compat.py needs to import CfgNode + from .compat import upgrade_config, downgrade_config + + logger.warning( + "Loading an old v{} config file '{}' by automatically upgrading to v{}. " + "See docs/CHANGELOG.md for instructions to update your files.".format( + loaded_ver, cfg_filename, self.VERSION + ) + ) + # To convert, first obtain a full config at an old version + old_self = downgrade_config(self, to_version=loaded_ver) + old_self.merge_from_other_cfg(loaded_cfg) + new_config = upgrade_config(old_self) + self.clear() + self.update(new_config) + + def dump(self, *args, **kwargs): + """ + Returns: + str: a yaml string representation of the config + """ + # to make it show up in docs + return super().dump(*args, **kwargs) + + +global_cfg = CfgNode() + + +def get_cfg() -> CfgNode: + """ + Get a copy of the default config. + + Returns: + a detectron2 CfgNode instance. + """ + from .defaults import _C + + return _C.clone() + + +def set_global_cfg(cfg: CfgNode) -> None: + """ + Let the global config point to the given cfg. 
+ + Assume that the given "cfg" has the key "KEY", after calling + `set_global_cfg(cfg)`, the key can be accessed by: + + .. code-block:: python + + from detectron2.config import global_cfg + print(global_cfg.KEY) + + By using a hacky global config, you can access these configs anywhere, + without having to pass the config object or the values deep into the code. + This is a hacky feature introduced for quick prototyping / research exploration. + """ + global global_cfg + global_cfg.clear() + global_cfg.update(cfg) + + +def configurable(init_func): + """ + Decorate a class's __init__ method so that it can be called with a CfgNode + object using the class's from_config classmethod. + + Examples: + + .. code-block:: python + + class A: + @configurable + def __init__(self, a, b=2, c=3): + pass + + @classmethod + def from_config(cls, cfg): + # Returns kwargs to be passed to __init__ + return {"a": cfg.A, "b": cfg.B} + + a1 = A(a=1, b=2) # regular construction + a2 = A(cfg) # construct with a cfg + a3 = A(cfg, b=3, c=4) # construct with extra overwrite + """ + assert init_func.__name__ == "__init__", "@configurable should only be used for __init__!" + if init_func.__module__.startswith("detectron2."): + assert ( + init_func.__doc__ is not None and "experimental" in init_func.__doc__ + ), f"configurable {init_func} should be marked experimental" + + @functools.wraps(init_func) + def wrapped(self, *args, **kwargs): + try: + from_config_func = type(self).from_config + except AttributeError: + raise AttributeError("Class with @configurable must have a 'from_config' classmethod.") + if not inspect.ismethod(from_config_func): + raise TypeError("Class with @configurable must have a 'from_config' classmethod.") + + if _called_with_cfg(*args, **kwargs): + explicit_args = _get_args_from_config(from_config_func, *args, **kwargs) + init_func(self, **explicit_args) + else: + init_func(self, *args, **kwargs) + + return wrapped + + +def _get_args_from_config(from_config_func, *args, **kwargs): + """ + Use `from_config` to obtain explicit arguments. + + Returns: + dict: arguments to be used for cls.__init__ + """ + signature = inspect.signature(from_config_func) + if list(signature.parameters.keys())[0] != "cfg": + raise TypeError( + f"{from_config_func.__self__}.from_config must take 'cfg' as the first argument!" + ) + support_var_arg = any( + param.kind in [param.VAR_POSITIONAL, param.VAR_KEYWORD] + for param in signature.parameters.values() + ) + if support_var_arg: # forward all arguments to from_config, if from_config accepts them + ret = from_config_func(*args, **kwargs) + else: + # forward supported arguments to from_config + supported_arg_names = set(signature.parameters.keys()) + extra_kwargs = {} + for name in list(kwargs.keys()): + if name not in supported_arg_names: + extra_kwargs[name] = kwargs.pop(name) + ret = from_config_func(*args, **kwargs) + # forward the other arguments to __init__ + ret.update(extra_kwargs) + return ret + + +def _called_with_cfg(*args, **kwargs): + """ + Returns: + bool: whether the arguments contain CfgNode and should be considered + forwarded to from_config. + """ + if len(args) and isinstance(args[0], _CfgNode): + return True + if isinstance(kwargs.pop("cfg", None), _CfgNode): + return True + # `from_config`'s first argument is forced to be "cfg". + # So the above check covers all cases. 
+ return False diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/defaults.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..b9ad62f5f01606438082e012ba5a4a68381c3b3c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/config/defaults.py @@ -0,0 +1,598 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .config import CfgNode as CN + +# ----------------------------------------------------------------------------- +# Convention about Training / Test specific parameters +# ----------------------------------------------------------------------------- +# Whenever an argument can be either used for training or for testing, the +# corresponding name will be post-fixed by a _TRAIN for a training parameter, +# or _TEST for a test-specific parameter. +# For example, the number of images during training will be +# IMAGES_PER_BATCH_TRAIN, while the number of images for testing will be +# IMAGES_PER_BATCH_TEST + +# ----------------------------------------------------------------------------- +# Config definition +# ----------------------------------------------------------------------------- + +_C = CN() + +# The version number, to upgrade from old configs to new ones if any +# changes happen. It's recommended to keep a VERSION in your config file. +_C.VERSION = 2 + +_C.MODEL = CN() +_C.MODEL.LOAD_PROPOSALS = False +_C.MODEL.MASK_ON = False +_C.MODEL.KEYPOINT_ON = False +_C.MODEL.DEVICE = "cuda" +_C.MODEL.META_ARCHITECTURE = "GeneralizedRCNN" + +# Path (possibly with schema like catalog:// or detectron2://) to a checkpoint file +# to be loaded to the model. You can find available models in the model zoo. +_C.MODEL.WEIGHTS = "" + +# Values to be used for image normalization (BGR order, since INPUT.FORMAT defaults to BGR). +# To train on images of different number of channels, just set different mean & std. +# Default values are the mean pixel value from ImageNet: [103.53, 116.28, 123.675] +_C.MODEL.PIXEL_MEAN = [103.530, 116.280, 123.675] +# When using pre-trained models in Detectron1 or any MSRA models, +# std has been absorbed into its conv1 weights, so the std needs to be set to 1. +# Otherwise, you can use [57.375, 57.120, 58.395] (ImageNet std) +_C.MODEL.PIXEL_STD = [1.0, 1.0, 1.0] + + +# ----------------------------------------------------------------------------- +# INPUT +# ----------------------------------------------------------------------------- +_C.INPUT = CN() +# Size of the smallest side of the image during training +_C.INPUT.MIN_SIZE_TRAIN = (800,) +# Sample size of smallest side by choice or random selection from range given by +# INPUT.MIN_SIZE_TRAIN +_C.INPUT.MIN_SIZE_TRAIN_SAMPLING = "choice" +# Maximum size of the side of the image during training +_C.INPUT.MAX_SIZE_TRAIN = 1333 +# Size of the smallest side of the image during testing. Set to zero to disable resize in testing. +_C.INPUT.MIN_SIZE_TEST = 800 +# Maximum size of the side of the image during testing +_C.INPUT.MAX_SIZE_TEST = 1333 + +# `True` if cropping is used for data augmentation during training +_C.INPUT.CROP = CN({"ENABLED": False}) +# Cropping type: +# - "relative" crop (H * CROP.SIZE[0], W * CROP.SIZE[1]) part of an input of size (H, W) +# - "relative_range" uniformly sample relative crop size from between [CROP.SIZE[0], CROP.SIZE[1]]. 
+# and [1, 1] and use it as in "relative" scenario. +# - "absolute" crop part of an input with absolute size: (CROP.SIZE[0], CROP.SIZE[1]). +_C.INPUT.CROP.TYPE = "relative_range" +# Size of crop in range (0, 1] if CROP.TYPE is "relative" or "relative_range" and in number of +# pixels if CROP.TYPE is "absolute" +_C.INPUT.CROP.SIZE = [0.9, 0.9] + + +# Whether the model needs RGB, YUV, HSV etc. +# Should be one of the modes defined here, as we use PIL to read the image: +# https://pillow.readthedocs.io/en/stable/handbook/concepts.html#concept-modes +# with BGR being the one exception. One can set image format to BGR, we will +# internally use RGB for conversion and flip the channels over +_C.INPUT.FORMAT = "BGR" +# The ground truth mask format that the model will use. +# Mask R-CNN supports either "polygon" or "bitmask" as ground truth. +_C.INPUT.MASK_FORMAT = "polygon" # alternative: "bitmask" + + +# ----------------------------------------------------------------------------- +# Dataset +# ----------------------------------------------------------------------------- +_C.DATASETS = CN() +# List of the dataset names for training. Must be registered in DatasetCatalog +_C.DATASETS.TRAIN = () +# List of the pre-computed proposal files for training, which must be consistent +# with data listed in DATASETS.TRAIN. +_C.DATASETS.PROPOSAL_FILES_TRAIN = () +# Number of top scoring precomputed proposals to keep for training +_C.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN = 2000 +# List of the dataset names for testing. Must be registered in DatasetCatalog +_C.DATASETS.TEST = () +# List of the pre-computed proposal files for test, which must be consistent +# with data listed in DATASETS.TEST. +_C.DATASETS.PROPOSAL_FILES_TEST = () +# Number of top scoring precomputed proposals to keep for test +_C.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST = 1000 + +# ----------------------------------------------------------------------------- +# DataLoader +# ----------------------------------------------------------------------------- +_C.DATALOADER = CN() +# Number of data loading threads +_C.DATALOADER.NUM_WORKERS = 4 +# If True, each batch should contain only images for which the aspect ratio +# is compatible. This groups portrait images together, and landscape images +# are not batched with portrait images. +_C.DATALOADER.ASPECT_RATIO_GROUPING = True +# Options: TrainingSampler, RepeatFactorTrainingSampler +_C.DATALOADER.SAMPLER_TRAIN = "TrainingSampler" +# Repeat threshold for RepeatFactorTrainingSampler +_C.DATALOADER.REPEAT_THRESHOLD = 0.0 +# if True, the dataloader will filter out images that have no associated +# annotations at train time. +_C.DATALOADER.FILTER_EMPTY_ANNOTATIONS = True + +# ---------------------------------------------------------------------------- # +# Backbone options +# ---------------------------------------------------------------------------- # +_C.MODEL.BACKBONE = CN() + +_C.MODEL.BACKBONE.NAME = "build_resnet_backbone" +# Freeze the first several stages so they are not trained. +# There are 5 stages in ResNet. The first is a convolution, and the following +# stages are each group of residual blocks. 
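+# For example, the default of 2 used below freezes the stem and "res2", so only
+# "res3"-"res5" (and the heads on top) are updated during training; a value of 0
+# leaves the whole backbone trainable.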
+_C.MODEL.BACKBONE.FREEZE_AT = 2 + + +# ---------------------------------------------------------------------------- # +# FPN options +# ---------------------------------------------------------------------------- # +_C.MODEL.FPN = CN() +# Names of the input feature maps to be used by FPN +# They must have contiguous power of 2 strides +# e.g., ["res2", "res3", "res4", "res5"] +_C.MODEL.FPN.IN_FEATURES = [] +_C.MODEL.FPN.OUT_CHANNELS = 256 + +# Options: "" (no norm), "GN" +_C.MODEL.FPN.NORM = "" + +# Types for fusing the FPN top-down and lateral features. Can be either "sum" or "avg" +_C.MODEL.FPN.FUSE_TYPE = "sum" + + +# ---------------------------------------------------------------------------- # +# Proposal generator options +# ---------------------------------------------------------------------------- # +_C.MODEL.PROPOSAL_GENERATOR = CN() +# Current proposal generators include "RPN", "RRPN" and "PrecomputedProposals" +_C.MODEL.PROPOSAL_GENERATOR.NAME = "RPN" +# Proposal height and width both need to be greater than MIN_SIZE +# (a the scale used during training or inference) +_C.MODEL.PROPOSAL_GENERATOR.MIN_SIZE = 0 + + +# ---------------------------------------------------------------------------- # +# Anchor generator options +# ---------------------------------------------------------------------------- # +_C.MODEL.ANCHOR_GENERATOR = CN() +# The generator can be any name in the ANCHOR_GENERATOR registry +_C.MODEL.ANCHOR_GENERATOR.NAME = "DefaultAnchorGenerator" +# Anchor sizes (i.e. sqrt of area) in absolute pixels w.r.t. the network input. +# Format: list[list[float]]. SIZES[i] specifies the list of sizes +# to use for IN_FEATURES[i]; len(SIZES) == len(IN_FEATURES) must be true, +# or len(SIZES) == 1 is true and size list SIZES[0] is used for all +# IN_FEATURES. +_C.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64, 128, 256, 512]] +# Anchor aspect ratios. For each area given in `SIZES`, anchors with different aspect +# ratios are generated by an anchor generator. +# Format: list[list[float]]. ASPECT_RATIOS[i] specifies the list of aspect ratios (H/W) +# to use for IN_FEATURES[i]; len(ASPECT_RATIOS) == len(IN_FEATURES) must be true, +# or len(ASPECT_RATIOS) == 1 is true and aspect ratio list ASPECT_RATIOS[0] is used +# for all IN_FEATURES. +_C.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.5, 1.0, 2.0]] +# Anchor angles. +# list[list[float]], the angle in degrees, for each input feature map. +# ANGLES[i] specifies the list of angles for IN_FEATURES[i]. +_C.MODEL.ANCHOR_GENERATOR.ANGLES = [[-90, 0, 90]] +# Relative offset between the center of the first anchor and the top-left corner of the image +# Value has to be in [0, 1). Recommend to use 0.5, which means half stride. +# The value is not expected to affect model accuracy. +_C.MODEL.ANCHOR_GENERATOR.OFFSET = 0.0 + +# ---------------------------------------------------------------------------- # +# RPN options +# ---------------------------------------------------------------------------- # +_C.MODEL.RPN = CN() +_C.MODEL.RPN.HEAD_NAME = "StandardRPNHead" # used by RPN_HEAD_REGISTRY + +# Names of the input feature maps to be used by RPN +# e.g., ["p2", "p3", "p4", "p5", "p6"] for FPN +_C.MODEL.RPN.IN_FEATURES = ["res4"] +# Remove RPN anchors that go outside the image by BOUNDARY_THRESH pixels +# Set to -1 or a large value, e.g. 
100000, to disable pruning anchors +_C.MODEL.RPN.BOUNDARY_THRESH = -1 +# IOU overlap ratios [BG_IOU_THRESHOLD, FG_IOU_THRESHOLD] +# Minimum overlap required between an anchor and ground-truth box for the +# (anchor, gt box) pair to be a positive example (IoU >= FG_IOU_THRESHOLD +# ==> positive RPN example: 1) +# Maximum overlap allowed between an anchor and ground-truth box for the +# (anchor, gt box) pair to be a negative examples (IoU < BG_IOU_THRESHOLD +# ==> negative RPN example: 0) +# Anchors with overlap in between (BG_IOU_THRESHOLD <= IoU < FG_IOU_THRESHOLD) +# are ignored (-1) +_C.MODEL.RPN.IOU_THRESHOLDS = [0.3, 0.7] +_C.MODEL.RPN.IOU_LABELS = [0, -1, 1] +# Total number of RPN examples per image +_C.MODEL.RPN.BATCH_SIZE_PER_IMAGE = 256 +# Target fraction of foreground (positive) examples per RPN minibatch +_C.MODEL.RPN.POSITIVE_FRACTION = 0.5 +# Weights on (dx, dy, dw, dh) for normalizing RPN anchor regression targets +_C.MODEL.RPN.BBOX_REG_WEIGHTS = (1.0, 1.0, 1.0, 1.0) +# The transition point from L1 to L2 loss. Set to 0.0 to make the loss simply L1. +_C.MODEL.RPN.SMOOTH_L1_BETA = 0.0 +_C.MODEL.RPN.LOSS_WEIGHT = 1.0 +# Number of top scoring RPN proposals to keep before applying NMS +# When FPN is used, this is *per FPN level* (not total) +_C.MODEL.RPN.PRE_NMS_TOPK_TRAIN = 12000 +_C.MODEL.RPN.PRE_NMS_TOPK_TEST = 6000 +# Number of top scoring RPN proposals to keep after applying NMS +# When FPN is used, this limit is applied per level and then again to the union +# of proposals from all levels +# NOTE: When FPN is used, the meaning of this config is different from Detectron1. +# It means per-batch topk in Detectron1, but per-image topk here. +# See "modeling/rpn/rpn_outputs.py" for details. +_C.MODEL.RPN.POST_NMS_TOPK_TRAIN = 2000 +_C.MODEL.RPN.POST_NMS_TOPK_TEST = 1000 +# NMS threshold used on RPN proposals +_C.MODEL.RPN.NMS_THRESH = 0.7 + +# ---------------------------------------------------------------------------- # +# ROI HEADS options +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_HEADS = CN() +_C.MODEL.ROI_HEADS.NAME = "Res5ROIHeads" +# Number of foreground classes +_C.MODEL.ROI_HEADS.NUM_CLASSES = 80 +# Names of the input feature maps to be used by ROI heads +# Currently all heads (box, mask, ...) use the same input feature map list +# e.g., ["p2", "p3", "p4", "p5"] is commonly used for FPN +_C.MODEL.ROI_HEADS.IN_FEATURES = ["res4"] +# IOU overlap ratios [IOU_THRESHOLD] +# Overlap threshold for an RoI to be considered background (if < IOU_THRESHOLD) +# Overlap threshold for an RoI to be considered foreground (if >= IOU_THRESHOLD) +_C.MODEL.ROI_HEADS.IOU_THRESHOLDS = [0.5] +_C.MODEL.ROI_HEADS.IOU_LABELS = [0, 1] +# RoI minibatch size *per image* (number of regions of interest [ROIs]) +# Total number of RoIs per training minibatch = +# ROI_HEADS.BATCH_SIZE_PER_IMAGE * SOLVER.IMS_PER_BATCH +# E.g., a common configuration is: 512 * 16 = 8192 +_C.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 512 +# Target fraction of RoI minibatch that is labeled foreground (i.e. class > 0) +_C.MODEL.ROI_HEADS.POSITIVE_FRACTION = 0.25 + +# Only used on test mode + +# Minimum score threshold (assuming scores in a [0, 1] range); a value chosen to +# balance obtaining high recall with not having too many low precision +# detections that will slow down inference post processing steps (like NMS) +# A default threshold of 0.0 increases AP by ~0.2-0.3 but significantly slows down +# inference. 
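+# As an illustrative override (the value shown is only an example), a demo that
+# wants fewer, more confident boxes can raise this key on its config object:
+#   cfg = get_cfg()
+#   cfg.merge_from_list(["MODEL.ROI_HEADS.SCORE_THRESH_TEST", "0.5"])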
+_C.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.05 +# Overlap threshold used for non-maximum suppression (suppress boxes with +# IoU >= this threshold) +_C.MODEL.ROI_HEADS.NMS_THRESH_TEST = 0.5 +# If True, augment proposals with ground-truth boxes before sampling proposals to +# train ROI heads. +_C.MODEL.ROI_HEADS.PROPOSAL_APPEND_GT = True + +# ---------------------------------------------------------------------------- # +# Box Head +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_BOX_HEAD = CN() +# C4 don't use head name option +# Options for non-C4 models: FastRCNNConvFCHead, +_C.MODEL.ROI_BOX_HEAD.NAME = "" +# Default weights on (dx, dy, dw, dh) for normalizing bbox regression targets +# These are empirically chosen to approximately lead to unit variance targets +_C.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10.0, 10.0, 5.0, 5.0) +# The transition point from L1 to L2 loss. Set to 0.0 to make the loss simply L1. +_C.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA = 0.0 +_C.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION = 14 +_C.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO = 0 +# Type of pooling operation applied to the incoming feature map for each RoI +_C.MODEL.ROI_BOX_HEAD.POOLER_TYPE = "ROIAlignV2" + +_C.MODEL.ROI_BOX_HEAD.NUM_FC = 0 +# Hidden layer dimension for FC layers in the RoI box head +_C.MODEL.ROI_BOX_HEAD.FC_DIM = 1024 +_C.MODEL.ROI_BOX_HEAD.NUM_CONV = 0 +# Channel dimension for Conv layers in the RoI box head +_C.MODEL.ROI_BOX_HEAD.CONV_DIM = 256 +# Normalization method for the convolution layers. +# Options: "" (no norm), "GN", "SyncBN". +_C.MODEL.ROI_BOX_HEAD.NORM = "" +# Whether to use class agnostic for bbox regression +_C.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG = False +# If true, RoI heads use bounding boxes predicted by the box head rather than proposal boxes. +_C.MODEL.ROI_BOX_HEAD.TRAIN_ON_PRED_BOXES = False + +# ---------------------------------------------------------------------------- # +# Cascaded Box Head +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_BOX_CASCADE_HEAD = CN() +# The number of cascade stages is implicitly defined by the length of the following two configs. +_C.MODEL.ROI_BOX_CASCADE_HEAD.BBOX_REG_WEIGHTS = ( + (10.0, 10.0, 5.0, 5.0), + (20.0, 20.0, 10.0, 10.0), + (30.0, 30.0, 15.0, 15.0), +) +_C.MODEL.ROI_BOX_CASCADE_HEAD.IOUS = (0.5, 0.6, 0.7) + + +# ---------------------------------------------------------------------------- # +# Mask Head +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_MASK_HEAD = CN() +_C.MODEL.ROI_MASK_HEAD.NAME = "MaskRCNNConvUpsampleHead" +_C.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION = 14 +_C.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO = 0 +_C.MODEL.ROI_MASK_HEAD.NUM_CONV = 0 # The number of convs in the mask head +_C.MODEL.ROI_MASK_HEAD.CONV_DIM = 256 +# Normalization method for the convolution layers. +# Options: "" (no norm), "GN", "SyncBN". 
+_C.MODEL.ROI_MASK_HEAD.NORM = "" +# Whether to use class-agnostic mask prediction +_C.MODEL.ROI_MASK_HEAD.CLS_AGNOSTIC_MASK = False +# Type of pooling operation applied to the incoming feature map for each RoI +_C.MODEL.ROI_MASK_HEAD.POOLER_TYPE = "ROIAlignV2" + + +# ---------------------------------------------------------------------------- # +# Keypoint Head +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_KEYPOINT_HEAD = CN() +_C.MODEL.ROI_KEYPOINT_HEAD.NAME = "KRCNNConvDeconvUpsampleHead" +_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION = 14 +_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO = 0 +_C.MODEL.ROI_KEYPOINT_HEAD.CONV_DIMS = tuple(512 for _ in range(8)) +_C.MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS = 17 # 17 is the number of keypoints in COCO. + +# Images with too few (or no) keypoints are excluded from training. +_C.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE = 1 +# Normalize by the total number of visible keypoints in the minibatch if True. +# Otherwise, normalize by the total number of keypoints that could ever exist +# in the minibatch. +# The keypoint softmax loss is only calculated on visible keypoints. +# Since the number of visible keypoints can vary significantly between +# minibatches, this has the effect of up-weighting the importance of +# minibatches with few visible keypoints. (Imagine the extreme case of +# only one visible keypoint versus N: in the case of N, each one +# contributes 1/N to the gradient compared to the single keypoint +# determining the gradient direction). Instead, we can normalize the +# loss by the total number of keypoints, if it were the case that all +# keypoints were visible in a full minibatch. (Returning to the example, +# this means that the one visible keypoint contributes as much as each +# of the N keypoints.) +_C.MODEL.ROI_KEYPOINT_HEAD.NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS = True +# Multi-task loss weight to use for keypoints +# Recommended values: +# - use 1.0 if NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS is True +# - use 4.0 if NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS is False +_C.MODEL.ROI_KEYPOINT_HEAD.LOSS_WEIGHT = 1.0 +# Type of pooling operation applied to the incoming feature map for each RoI +_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_TYPE = "ROIAlignV2" + +# ---------------------------------------------------------------------------- # +# Semantic Segmentation Head +# ---------------------------------------------------------------------------- # +_C.MODEL.SEM_SEG_HEAD = CN() +_C.MODEL.SEM_SEG_HEAD.NAME = "SemSegFPNHead" +_C.MODEL.SEM_SEG_HEAD.IN_FEATURES = ["p2", "p3", "p4", "p5"] +# Label in the semantic segmentation ground truth that is ignored, i.e., no loss is calculated for +# the corresponding pixel. +_C.MODEL.SEM_SEG_HEAD.IGNORE_VALUE = 255 +# Number of classes in the semantic segmentation head +_C.MODEL.SEM_SEG_HEAD.NUM_CLASSES = 54 +# Number of channels in the 3x3 convs inside semantic-FPN heads. +_C.MODEL.SEM_SEG_HEAD.CONVS_DIM = 128 +# Outputs from semantic-FPN heads are up-scaled to the COMMON_STRIDE stride. +_C.MODEL.SEM_SEG_HEAD.COMMON_STRIDE = 4 +# Normalization method for the convolution layers. Options: "" (no norm), "GN". +_C.MODEL.SEM_SEG_HEAD.NORM = "GN" +_C.MODEL.SEM_SEG_HEAD.LOSS_WEIGHT = 1.0 + +_C.MODEL.PANOPTIC_FPN = CN() +# Scaling of all losses from instance detection / segmentation head. 
+_C.MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT = 1.0 + +# options when combining instance & semantic segmentation outputs +_C.MODEL.PANOPTIC_FPN.COMBINE = CN({"ENABLED": True}) +_C.MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH = 0.5 +_C.MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT = 4096 +_C.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = 0.5 + + +# ---------------------------------------------------------------------------- # +# RetinaNet Head +# ---------------------------------------------------------------------------- # +_C.MODEL.RETINANET = CN() + +# This is the number of foreground classes. +_C.MODEL.RETINANET.NUM_CLASSES = 80 + +_C.MODEL.RETINANET.IN_FEATURES = ["p3", "p4", "p5", "p6", "p7"] + +# Convolutions to use in the cls and bbox tower +# NOTE: this doesn't include the last conv for logits +_C.MODEL.RETINANET.NUM_CONVS = 4 + +# IoU overlap ratio [bg, fg] for labeling anchors. +# Anchors with < bg are labeled negative (0) +# Anchors with >= bg and < fg are ignored (-1) +# Anchors with >= fg are labeled positive (1) +_C.MODEL.RETINANET.IOU_THRESHOLDS = [0.4, 0.5] +_C.MODEL.RETINANET.IOU_LABELS = [0, -1, 1] + +# Prior prob for rare case (i.e. foreground) at the beginning of training. +# This is used to set the bias for the logits layer of the classifier subnet. +# This improves training stability in the case of heavy class imbalance. +_C.MODEL.RETINANET.PRIOR_PROB = 0.01 + +# Inference cls score threshold, only anchors with score > INFERENCE_TH are +# considered for inference (to improve speed) +_C.MODEL.RETINANET.SCORE_THRESH_TEST = 0.05 +_C.MODEL.RETINANET.TOPK_CANDIDATES_TEST = 1000 +_C.MODEL.RETINANET.NMS_THRESH_TEST = 0.5 + +# Weights on (dx, dy, dw, dh) for normalizing Retinanet anchor regression targets +_C.MODEL.RETINANET.BBOX_REG_WEIGHTS = (1.0, 1.0, 1.0, 1.0) + +# Loss parameters +_C.MODEL.RETINANET.FOCAL_LOSS_GAMMA = 2.0 +_C.MODEL.RETINANET.FOCAL_LOSS_ALPHA = 0.25 +_C.MODEL.RETINANET.SMOOTH_L1_LOSS_BETA = 0.1 + + +# ---------------------------------------------------------------------------- # +# ResNe[X]t options (ResNets = {ResNet, ResNeXt} +# Note that parts of a resnet may be used for both the backbone and the head +# These options apply to both +# ---------------------------------------------------------------------------- # +_C.MODEL.RESNETS = CN() + +_C.MODEL.RESNETS.DEPTH = 50 +_C.MODEL.RESNETS.OUT_FEATURES = ["res4"] # res4 for C4 backbone, res2..5 for FPN backbone + +# Number of groups to use; 1 ==> ResNet; > 1 ==> ResNeXt +_C.MODEL.RESNETS.NUM_GROUPS = 1 + +# Options: FrozenBN, GN, "SyncBN", "BN" +_C.MODEL.RESNETS.NORM = "FrozenBN" + +# Baseline width of each group. +# Scaling this parameters will scale the width of all bottleneck layers. +_C.MODEL.RESNETS.WIDTH_PER_GROUP = 64 + +# Place the stride 2 conv on the 1x1 filter +# Use True only for the original MSRA ResNet; use False for C2 and Torch models +_C.MODEL.RESNETS.STRIDE_IN_1X1 = True + +# Apply dilation in stage "res5" +_C.MODEL.RESNETS.RES5_DILATION = 1 + +# Output width of res2. Scaling this parameters will scale the width of all 1x1 convs in ResNet +# For R18 and R34, this needs to be set to 64 +_C.MODEL.RESNETS.RES2_OUT_CHANNELS = 256 +_C.MODEL.RESNETS.STEM_OUT_CHANNELS = 64 + +# Apply Deformable Convolution in stages +# Specify if apply deform_conv on Res2, Res3, Res4, Res5 +_C.MODEL.RESNETS.DEFORM_ON_PER_STAGE = [False, False, False, False] +# Use True to use modulated deform_conv (DeformableV2, https://arxiv.org/abs/1811.11168); +# Use False for DeformableV1. 
+_C.MODEL.RESNETS.DEFORM_MODULATED = False +# Number of groups in deformable conv. +_C.MODEL.RESNETS.DEFORM_NUM_GROUPS = 1 + + +# ---------------------------------------------------------------------------- # +# Solver +# ---------------------------------------------------------------------------- # +_C.SOLVER = CN() + +# See detectron2/solver/build.py for LR scheduler options +_C.SOLVER.LR_SCHEDULER_NAME = "WarmupMultiStepLR" + +_C.SOLVER.MAX_ITER = 40000 + +_C.SOLVER.BASE_LR = 0.001 + +_C.SOLVER.MOMENTUM = 0.9 + +_C.SOLVER.NESTEROV = False + +_C.SOLVER.WEIGHT_DECAY = 0.0001 +# The weight decay that's applied to parameters of normalization layers +# (typically the affine transformation) +_C.SOLVER.WEIGHT_DECAY_NORM = 0.0 + +_C.SOLVER.GAMMA = 0.1 +# The iteration number to decrease learning rate by GAMMA. +_C.SOLVER.STEPS = (30000,) + +_C.SOLVER.WARMUP_FACTOR = 1.0 / 1000 +_C.SOLVER.WARMUP_ITERS = 1000 +_C.SOLVER.WARMUP_METHOD = "linear" + +# Save a checkpoint after every this number of iterations +_C.SOLVER.CHECKPOINT_PERIOD = 5000 + +# Number of images per batch across all machines. +# If we have 16 GPUs and IMS_PER_BATCH = 32, +# each GPU will see 2 images per batch. +_C.SOLVER.IMS_PER_BATCH = 16 + +# Detectron v1 (and previous detection code) used a 2x higher LR and 0 WD for +# biases. This is not useful (at least for recent models). You should avoid +# changing these and they exist only to reproduce Detectron v1 training if +# desired. +_C.SOLVER.BIAS_LR_FACTOR = 1.0 +_C.SOLVER.WEIGHT_DECAY_BIAS = _C.SOLVER.WEIGHT_DECAY + +# Gradient clipping +_C.SOLVER.CLIP_GRADIENTS = CN({"ENABLED": False}) +# Type of gradient clipping, currently 2 values are supported: +# - "value": the absolute values of elements of each gradients are clipped +# - "norm": the norm of the gradient for each parameter is clipped thus +# affecting all elements in the parameter +_C.SOLVER.CLIP_GRADIENTS.CLIP_TYPE = "value" +# Maximum absolute value used for clipping gradients +_C.SOLVER.CLIP_GRADIENTS.CLIP_VALUE = 1.0 +# Floating point number p for L-p norm to be used with the "norm" +# gradient clipping type; for L-inf, please specify .inf +_C.SOLVER.CLIP_GRADIENTS.NORM_TYPE = 2.0 + +# ---------------------------------------------------------------------------- # +# Specific test options +# ---------------------------------------------------------------------------- # +_C.TEST = CN() +# For end-to-end tests to verify the expected accuracy. +# Each item is [task, metric, value, tolerance] +# e.g.: [['bbox', 'AP', 38.5, 0.2]] +_C.TEST.EXPECTED_RESULTS = [] +# The period (in terms of steps) to evaluate the model during training. +# Set to 0 to disable. +_C.TEST.EVAL_PERIOD = 0 +# The sigmas used to calculate keypoint OKS. See http://cocodataset.org/#keypoints-eval +# When empty it will use the defaults in COCO. +# Otherwise it should have the same length as ROI_KEYPOINT_HEAD.NUM_KEYPOINTS. +_C.TEST.KEYPOINT_OKS_SIGMAS = [] +# Maximum number of detections to return per image during inference (100 is +# based on the limit established for the COCO dataset). 
+_C.TEST.DETECTIONS_PER_IMAGE = 100 + +_C.TEST.AUG = CN({"ENABLED": False}) +_C.TEST.AUG.MIN_SIZES = (400, 500, 600, 700, 800, 900, 1000, 1100, 1200) +_C.TEST.AUG.MAX_SIZE = 4000 +_C.TEST.AUG.FLIP = True + +_C.TEST.PRECISE_BN = CN({"ENABLED": False}) +_C.TEST.PRECISE_BN.NUM_ITER = 200 + +# ---------------------------------------------------------------------------- # +# Misc options +# ---------------------------------------------------------------------------- # +# Directory where output files are written +_C.OUTPUT_DIR = "./output" +# Set seed to negative to fully randomize everything. +# Set seed to positive to use a fixed seed. Note that a fixed seed increases +# reproducibility but does not guarantee fully deterministic behavior. +# Disabling all parallelism further increases reproducibility. +_C.SEED = -1 +# Benchmark different cudnn algorithms. +# If input images have very different sizes, this option will have large overhead +# for about 10k iterations. It usually hurts total time, but can benefit for certain models. +# If input images have the same or similar sizes, benchmark is often helpful. +_C.CUDNN_BENCHMARK = False +# The period (in terms of steps) for minibatch visualization at train time. +# Set to 0 to disable. +_C.VIS_PERIOD = 0 + +# global config is for quick hack purposes. +# You can set them in command line or config files, +# and access it with: +# +# from detectron2.config import global_cfg +# print(global_cfg.HACK) +# +# Do not commit any configs into it. +_C.GLOBAL = CN() +_C.GLOBAL.HACK = 1.0 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e8f72e0f45d6d683771f0d815dfd0e3d0db52b9d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from . import transforms # isort:skip + +from .build import ( + build_detection_test_loader, + build_detection_train_loader, + get_detection_dataset_dicts, + load_proposals_into_dataset, + print_instances_class_histogram, +) +from .catalog import DatasetCatalog, MetadataCatalog +from .common import DatasetFromList, MapDataset +from .dataset_mapper import DatasetMapper + +# ensure the builtin data are registered +from . import datasets, samplers # isort:skip + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/build.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/build.py new file mode 100644 index 0000000000000000000000000000000000000000..cb7e85789d75daf4ee206449ce0d3254e948db16 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/build.py @@ -0,0 +1,397 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import bisect +import copy +import itertools +import logging +import numpy as np +import operator +import pickle +import torch.utils.data +from fvcore.common.file_io import PathManager +from tabulate import tabulate +from termcolor import colored + +from detectron2.structures import BoxMode +from detectron2.utils.comm import get_world_size +from detectron2.utils.env import seed_all_rng +from detectron2.utils.logger import log_first_n + +from . 
import samplers +from .catalog import DatasetCatalog, MetadataCatalog +from .common import AspectRatioGroupedDataset, DatasetFromList, MapDataset +from .dataset_mapper import DatasetMapper +from .detection_utils import check_metadata_consistency + +""" +This file contains the default logic to build a dataloader for training or testing. +""" + +__all__ = [ + "build_detection_train_loader", + "build_detection_test_loader", + "get_detection_dataset_dicts", + "load_proposals_into_dataset", + "print_instances_class_histogram", +] + + +def filter_images_with_only_crowd_annotations(dataset_dicts): + """ + Filter out images with none annotations or only crowd annotations + (i.e., images without non-crowd annotations). + A common training-time preprocessing on COCO dataset. + + Args: + dataset_dicts (list[dict]): annotations in Detectron2 Dataset format. + + Returns: + list[dict]: the same format, but filtered. + """ + num_before = len(dataset_dicts) + + def valid(anns): + for ann in anns: + if ann.get("iscrowd", 0) == 0: + return True + return False + + dataset_dicts = [x for x in dataset_dicts if valid(x["annotations"])] + num_after = len(dataset_dicts) + logger = logging.getLogger(__name__) + logger.info( + "Removed {} images with no usable annotations. {} images left.".format( + num_before - num_after, num_after + ) + ) + return dataset_dicts + + +def filter_images_with_few_keypoints(dataset_dicts, min_keypoints_per_image): + """ + Filter out images with too few number of keypoints. + + Args: + dataset_dicts (list[dict]): annotations in Detectron2 Dataset format. + + Returns: + list[dict]: the same format as dataset_dicts, but filtered. + """ + num_before = len(dataset_dicts) + + def visible_keypoints_in_image(dic): + # Each keypoints field has the format [x1, y1, v1, ...], where v is visibility + annotations = dic["annotations"] + return sum( + (np.array(ann["keypoints"][2::3]) > 0).sum() + for ann in annotations + if "keypoints" in ann + ) + + dataset_dicts = [ + x for x in dataset_dicts if visible_keypoints_in_image(x) >= min_keypoints_per_image + ] + num_after = len(dataset_dicts) + logger = logging.getLogger(__name__) + logger.info( + "Removed {} images with fewer than {} keypoints.".format( + num_before - num_after, min_keypoints_per_image + ) + ) + return dataset_dicts + + +def load_proposals_into_dataset(dataset_dicts, proposal_file): + """ + Load precomputed object proposals into the dataset. + + The proposal file should be a pickled dict with the following keys: + + - "ids": list[int] or list[str], the image ids + - "boxes": list[np.ndarray], each is an Nx4 array of boxes corresponding to the image id + - "objectness_logits": list[np.ndarray], each is an N sized array of objectness scores + corresponding to the boxes. + - "bbox_mode": the BoxMode of the boxes array. Defaults to ``BoxMode.XYXY_ABS``. + + Args: + dataset_dicts (list[dict]): annotations in Detectron2 Dataset format. + proposal_file (str): file path of pre-computed proposals, in pkl format. + + Returns: + list[dict]: the same format as dataset_dicts, but added proposal field. 
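The docstring above fixes the pickled layout that `load_proposals_into_dataset` expects. A hedged sketch of writing such a file (the image ids, boxes, and output path are made-up placeholders):

```python
import pickle
import numpy as np
from detectron2.structures import BoxMode

proposals = {
    "ids": [42, 43],  # must match the "image_id" values of the dataset dicts
    "boxes": [np.array([[0.0, 0.0, 100.0, 80.0]]), np.array([[10.0, 10.0, 60.0, 90.0]])],
    "objectness_logits": [np.array([2.5]), np.array([1.3])],
    "bbox_mode": BoxMode.XYXY_ABS,
}
with open("my_proposals.pkl", "wb") as f:
    pickle.dump(proposals, f)
```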
+ """ + logger = logging.getLogger(__name__) + logger.info("Loading proposals from: {}".format(proposal_file)) + + with PathManager.open(proposal_file, "rb") as f: + proposals = pickle.load(f, encoding="latin1") + + # Rename the key names in D1 proposal files + rename_keys = {"indexes": "ids", "scores": "objectness_logits"} + for key in rename_keys: + if key in proposals: + proposals[rename_keys[key]] = proposals.pop(key) + + # Fetch the indexes of all proposals that are in the dataset + # Convert image_id to str since they could be int. + img_ids = set({str(record["image_id"]) for record in dataset_dicts}) + id_to_index = {str(id): i for i, id in enumerate(proposals["ids"]) if str(id) in img_ids} + + # Assuming default bbox_mode of precomputed proposals are 'XYXY_ABS' + bbox_mode = BoxMode(proposals["bbox_mode"]) if "bbox_mode" in proposals else BoxMode.XYXY_ABS + + for record in dataset_dicts: + # Get the index of the proposal + i = id_to_index[str(record["image_id"])] + + boxes = proposals["boxes"][i] + objectness_logits = proposals["objectness_logits"][i] + # Sort the proposals in descending order of the scores + inds = objectness_logits.argsort()[::-1] + record["proposal_boxes"] = boxes[inds] + record["proposal_objectness_logits"] = objectness_logits[inds] + record["proposal_bbox_mode"] = bbox_mode + + return dataset_dicts + + +def _quantize(x, bin_edges): + bin_edges = copy.copy(bin_edges) + bin_edges = sorted(bin_edges) + quantized = list(map(lambda y: bisect.bisect_right(bin_edges, y), x)) + return quantized + + +def print_instances_class_histogram(dataset_dicts, class_names): + """ + Args: + dataset_dicts (list[dict]): list of dataset dicts. + class_names (list[str]): list of class names (zero-indexed). + """ + num_classes = len(class_names) + hist_bins = np.arange(num_classes + 1) + histogram = np.zeros((num_classes,), dtype=np.int) + for entry in dataset_dicts: + annos = entry["annotations"] + classes = [x["category_id"] for x in annos if not x.get("iscrowd", 0)] + histogram += np.histogram(classes, bins=hist_bins)[0] + + N_COLS = min(6, len(class_names) * 2) + + def short_name(x): + # make long class names shorter. useful for lvis + if len(x) > 13: + return x[:11] + ".." + return x + + data = list( + itertools.chain(*[[short_name(class_names[i]), int(v)] for i, v in enumerate(histogram)]) + ) + total_num_instances = sum(data[1::2]) + data.extend([None] * (N_COLS - (len(data) % N_COLS))) + if num_classes > 1: + data.extend(["total", total_num_instances]) + data = itertools.zip_longest(*[data[i::N_COLS] for i in range(N_COLS)]) + table = tabulate( + data, + headers=["category", "#instances"] * (N_COLS // 2), + tablefmt="pipe", + numalign="left", + stralign="center", + ) + log_first_n( + logging.INFO, + "Distribution of instances among all {} categories:\n".format(num_classes) + + colored(table, "cyan"), + key="message", + ) + + +def get_detection_dataset_dicts( + dataset_names, filter_empty=True, min_keypoints=0, proposal_files=None +): + """ + Load and prepare dataset dicts for instance detection/segmentation and semantic segmentation. + + Args: + dataset_names (list[str]): a list of dataset names + filter_empty (bool): whether to filter out images without instance annotations + min_keypoints (int): filter out images with fewer keypoints than + `min_keypoints`. Set to 0 to do nothing. + proposal_files (list[str]): if given, a list of object proposal files + that match each dataset in `dataset_names`. 
+ """ + assert len(dataset_names) + dataset_dicts = [DatasetCatalog.get(dataset_name) for dataset_name in dataset_names] + for dataset_name, dicts in zip(dataset_names, dataset_dicts): + assert len(dicts), "Dataset '{}' is empty!".format(dataset_name) + + if proposal_files is not None: + assert len(dataset_names) == len(proposal_files) + # load precomputed proposals from proposal files + dataset_dicts = [ + load_proposals_into_dataset(dataset_i_dicts, proposal_file) + for dataset_i_dicts, proposal_file in zip(dataset_dicts, proposal_files) + ] + + dataset_dicts = list(itertools.chain.from_iterable(dataset_dicts)) + + has_instances = "annotations" in dataset_dicts[0] + # Keep images without instance-level GT if the dataset has semantic labels. + if filter_empty and has_instances and "sem_seg_file_name" not in dataset_dicts[0]: + dataset_dicts = filter_images_with_only_crowd_annotations(dataset_dicts) + + if min_keypoints > 0 and has_instances: + dataset_dicts = filter_images_with_few_keypoints(dataset_dicts, min_keypoints) + + if has_instances: + try: + class_names = MetadataCatalog.get(dataset_names[0]).thing_classes + check_metadata_consistency("thing_classes", dataset_names) + print_instances_class_histogram(dataset_dicts, class_names) + except AttributeError: # class names are not available for this dataset + pass + return dataset_dicts + + +def build_detection_train_loader(cfg, mapper=None): + """ + A data loader is created by the following steps: + + 1. Use the dataset names in config to query :class:`DatasetCatalog`, and obtain a list of dicts. + 2. Coordinate a random shuffle order shared among all processes (all GPUs) + 3. Each process spawn another few workers to process the dicts. Each worker will: + * Map each metadata dict into another format to be consumed by the model. + * Batch them by simply putting dicts into a list. + + The batched ``list[mapped_dict]`` is what this dataloader will yield. + + Args: + cfg (CfgNode): the config + mapper (callable): a callable which takes a sample (dict) from dataset and + returns the format to be consumed by the model. + By default it will be `DatasetMapper(cfg, True)`. 
+ + Returns: + an infinite iterator of training data + """ + num_workers = get_world_size() + images_per_batch = cfg.SOLVER.IMS_PER_BATCH + assert ( + images_per_batch % num_workers == 0 + ), "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of workers ({}).".format( + images_per_batch, num_workers + ) + assert ( + images_per_batch >= num_workers + ), "SOLVER.IMS_PER_BATCH ({}) must be larger than the number of workers ({}).".format( + images_per_batch, num_workers + ) + images_per_worker = images_per_batch // num_workers + + dataset_dicts = get_detection_dataset_dicts( + cfg.DATASETS.TRAIN, + filter_empty=cfg.DATALOADER.FILTER_EMPTY_ANNOTATIONS, + min_keypoints=cfg.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE + if cfg.MODEL.KEYPOINT_ON + else 0, + proposal_files=cfg.DATASETS.PROPOSAL_FILES_TRAIN if cfg.MODEL.LOAD_PROPOSALS else None, + ) + dataset = DatasetFromList(dataset_dicts, copy=False) + + if mapper is None: + mapper = DatasetMapper(cfg, True) + dataset = MapDataset(dataset, mapper) + + sampler_name = cfg.DATALOADER.SAMPLER_TRAIN + logger = logging.getLogger(__name__) + logger.info("Using training sampler {}".format(sampler_name)) + if sampler_name == "TrainingSampler": + sampler = samplers.TrainingSampler(len(dataset)) + elif sampler_name == "RepeatFactorTrainingSampler": + sampler = samplers.RepeatFactorTrainingSampler( + dataset_dicts, cfg.DATALOADER.REPEAT_THRESHOLD + ) + else: + raise ValueError("Unknown training sampler: {}".format(sampler_name)) + + if cfg.DATALOADER.ASPECT_RATIO_GROUPING: + data_loader = torch.utils.data.DataLoader( + dataset, + sampler=sampler, + num_workers=cfg.DATALOADER.NUM_WORKERS, + batch_sampler=None, + collate_fn=operator.itemgetter(0), # don't batch, but yield individual elements + worker_init_fn=worker_init_reset_seed, + ) # yield individual mapped dict + data_loader = AspectRatioGroupedDataset(data_loader, images_per_worker) + else: + batch_sampler = torch.utils.data.sampler.BatchSampler( + sampler, images_per_worker, drop_last=True + ) + # drop_last so the batch always have the same size + data_loader = torch.utils.data.DataLoader( + dataset, + num_workers=cfg.DATALOADER.NUM_WORKERS, + batch_sampler=batch_sampler, + collate_fn=trivial_batch_collator, + worker_init_fn=worker_init_reset_seed, + ) + + return data_loader + + +def build_detection_test_loader(cfg, dataset_name, mapper=None): + """ + Similar to `build_detection_train_loader`. + But this function uses the given `dataset_name` argument (instead of the names in cfg), + and uses batch size 1. + + Args: + cfg: a detectron2 CfgNode + dataset_name (str): a name of the dataset that's available in the DatasetCatalog + mapper (callable): a callable which takes a sample (dict) from dataset + and returns the format to be consumed by the model. + By default it will be `DatasetMapper(cfg, False)`. + + Returns: + DataLoader: a torch DataLoader, that loads the given detection + dataset, with test-time transformation and batching. + """ + dataset_dicts = get_detection_dataset_dicts( + [dataset_name], + filter_empty=False, + proposal_files=[ + cfg.DATASETS.PROPOSAL_FILES_TEST[list(cfg.DATASETS.TEST).index(dataset_name)] + ] + if cfg.MODEL.LOAD_PROPOSALS + else None, + ) + + dataset = DatasetFromList(dataset_dicts) + if mapper is None: + mapper = DatasetMapper(cfg, False) + dataset = MapDataset(dataset, mapper) + + sampler = samplers.InferenceSampler(len(dataset)) + # Always use 1 image per worker during inference since this is the + # standard when reporting inference time in papers. 
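A minimal usage sketch for the two builders, assuming a default config from detectron2's `get_cfg()` and dataset names that are already registered and available on disk:

```python
from detectron2.config import get_cfg
from detectron2.data import build_detection_train_loader, build_detection_test_loader

cfg = get_cfg()
cfg.DATASETS.TRAIN = ("coco_2017_train",)  # any registered dataset names
cfg.SOLVER.IMS_PER_BATCH = 2
cfg.DATALOADER.NUM_WORKERS = 0

train_loader = build_detection_train_loader(cfg)                 # infinite iterator
test_loader = build_detection_test_loader(cfg, "coco_2017_val")  # batch size 1

batch = next(iter(train_loader))
print(len(batch), sorted(batch[0].keys()))  # list[dict] with "image", "instances", ...
```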
+ batch_sampler = torch.utils.data.sampler.BatchSampler(sampler, 1, drop_last=False) + + data_loader = torch.utils.data.DataLoader( + dataset, + num_workers=cfg.DATALOADER.NUM_WORKERS, + batch_sampler=batch_sampler, + collate_fn=trivial_batch_collator, + ) + return data_loader + + +def trivial_batch_collator(batch): + """ + A batch collator that does nothing. + """ + return batch + + +def worker_init_reset_seed(worker_id): + seed_all_rng(np.random.randint(2 ** 31) + worker_id) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/catalog.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/catalog.py new file mode 100644 index 0000000000000000000000000000000000000000..57f18c8705363fdcc79182f0abd0b28d6b2dde8b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/catalog.py @@ -0,0 +1,221 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import logging +import types +from typing import List + +from detectron2.utils.logger import log_first_n + +__all__ = ["DatasetCatalog", "MetadataCatalog"] + + +class DatasetCatalog(object): + """ + A catalog that stores information about the data and how to obtain them. + + It contains a mapping from strings + (which are names that identify a dataset, e.g. "coco_2014_train") + to a function which parses the dataset and returns the samples in the + format of `list[dict]`. + + The returned dicts should be in Detectron2 Dataset format (See DATASETS.md for details) + if used with the data loader functionalities in `data/build.py,data/detection_transform.py`. + + The purpose of having this catalog is to make it easy to choose + different data, by just using the strings in the config. + """ + + _REGISTERED = {} + + @staticmethod + def register(name, func): + """ + Args: + name (str): the name that identifies a dataset, e.g. "coco_2014_train". + func (callable): a callable which takes no arguments and returns a list of dicts. + """ + assert callable(func), "You must register a function with `DatasetCatalog.register`!" + assert name not in DatasetCatalog._REGISTERED, "Dataset '{}' is already registered!".format( + name + ) + DatasetCatalog._REGISTERED[name] = func + + @staticmethod + def get(name): + """ + Call the registered function and return its results. + + Args: + name (str): the name that identifies a dataset, e.g. "coco_2014_train". + + Returns: + list[dict]: dataset annotations.0 + """ + try: + f = DatasetCatalog._REGISTERED[name] + except KeyError: + raise KeyError( + "Dataset '{}' is not registered! Available data are: {}".format( + name, ", ".join(DatasetCatalog._REGISTERED.keys()) + ) + ) + return f() + + @staticmethod + def list() -> List[str]: + """ + List all registered data. + + Returns: + list[str] + """ + return list(DatasetCatalog._REGISTERED.keys()) + + @staticmethod + def clear(): + """ + Remove all registered dataset. + """ + DatasetCatalog._REGISTERED.clear() + + +class Metadata(types.SimpleNamespace): + """ + A class that supports simple attribute setter/getter. + It is intended for storing metadata of a dataset and make it accessible globally. + + Examples: + + .. 
code-block:: python + + # somewhere when you load the data: + MetadataCatalog.get("mydataset").thing_classes = ["person", "dog"] + + # somewhere when you print statistics or visualize: + classes = MetadataCatalog.get("mydataset").thing_classes + """ + + # the name of the dataset + # set default to N/A so that `self.name` in the errors will not trigger getattr again + name: str = "N/A" + + _RENAMED = { + "class_names": "thing_classes", + "dataset_id_to_contiguous_id": "thing_dataset_id_to_contiguous_id", + "stuff_class_names": "stuff_classes", + } + + def __getattr__(self, key): + if key in self._RENAMED: + log_first_n( + logging.WARNING, + "Metadata '{}' was renamed to '{}'!".format(key, self._RENAMED[key]), + n=10, + ) + return getattr(self, self._RENAMED[key]) + + raise AttributeError( + "Attribute '{}' does not exist in the metadata of '{}'. Available keys are {}.".format( + key, self.name, str(self.__dict__.keys()) + ) + ) + + def __setattr__(self, key, val): + if key in self._RENAMED: + log_first_n( + logging.WARNING, + "Metadata '{}' was renamed to '{}'!".format(key, self._RENAMED[key]), + n=10, + ) + setattr(self, self._RENAMED[key], val) + + # Ensure that metadata of the same name stays consistent + try: + oldval = getattr(self, key) + assert oldval == val, ( + "Attribute '{}' in the metadata of '{}' cannot be set " + "to a different value!\n{} != {}".format(key, self.name, oldval, val) + ) + except AttributeError: + super().__setattr__(key, val) + + def as_dict(self): + """ + Returns all the metadata as a dict. + Note that modifications to the returned dict will not reflect on the Metadata object. + """ + return copy.copy(self.__dict__) + + def set(self, **kwargs): + """ + Set multiple metadata with kwargs. + """ + for k, v in kwargs.items(): + setattr(self, k, v) + return self + + def get(self, key, default=None): + """ + Access an attribute and return its value if exists. + Otherwise return default. + """ + try: + return getattr(self, key) + except AttributeError: + return default + + +class MetadataCatalog: + """ + MetadataCatalog provides access to "Metadata" of a given dataset. + + The metadata associated with a certain name is a singleton: once created, + the metadata will stay alive and will be returned by future calls to `get(name)`. + + It's like global variables, so don't abuse it. + It's meant for storing knowledge that's constant and shared across the execution + of the program, e.g.: the class names in COCO. + """ + + _NAME_TO_META = {} + + @staticmethod + def get(name): + """ + Args: + name (str): name of a dataset (e.g. coco_2014_train). + + Returns: + Metadata: The :class:`Metadata` instance associated with this name, + or create an empty one if none is available. + """ + assert len(name) + if name in MetadataCatalog._NAME_TO_META: + ret = MetadataCatalog._NAME_TO_META[name] + # TODO this is for the BC breaking change in D15247032. + # Remove this in the future. + if hasattr(ret, "dataset_name"): + logger = logging.getLogger() + logger.warning( + """ +The 'dataset_name' key in metadata is no longer used for +sharing metadata among splits after D15247032! Add +metadata to each split (now called dataset) separately! + """ + ) + parent_meta = MetadataCatalog.get(ret.dataset_name).as_dict() + ret.set(**parent_meta) + return ret + else: + m = MetadataCatalog._NAME_TO_META[name] = Metadata(name=name) + return m + + @staticmethod + def list(): + """ + List all registered metadata. 
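Putting the two catalogs together, registering a hypothetical dataset and attaching its metadata looks roughly like this:

```python
from detectron2.data import DatasetCatalog, MetadataCatalog

def get_my_dicts():
    # Hypothetical loader returning dicts in Detectron2 Dataset format.
    return [{"file_name": "img_0.jpg", "image_id": 0,
             "height": 480, "width": 640, "annotations": []}]

DatasetCatalog.register("my_dataset_train", get_my_dicts)
MetadataCatalog.get("my_dataset_train").thing_classes = ["shirt", "dress"]

print(len(DatasetCatalog.get("my_dataset_train")))            # 1
print(MetadataCatalog.get("my_dataset_train").thing_classes)  # ['shirt', 'dress']
```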
+ + Returns: + list[str]: keys (names of data) of all registered metadata + """ + return list(MetadataCatalog._NAME_TO_META.keys()) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/common.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/common.py new file mode 100644 index 0000000000000000000000000000000000000000..a42c8b21b86338a3f034d01c3484dd32b1b845a9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/common.py @@ -0,0 +1,149 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import logging +import numpy as np +import pickle +import random +import torch.utils.data as data + +from detectron2.utils.serialize import PicklableWrapper + +__all__ = ["MapDataset", "DatasetFromList", "AspectRatioGroupedDataset"] + + +class MapDataset(data.Dataset): + """ + Map a function over the elements in a dataset. + + Args: + dataset: a dataset where map function is applied. + map_func: a callable which maps the element in dataset. map_func is + responsible for error handling, when error happens, it needs to + return None so the MapDataset will randomly use other + elements from the dataset. + """ + + def __init__(self, dataset, map_func): + self._dataset = dataset + self._map_func = PicklableWrapper(map_func) # wrap so that a lambda will work + + self._rng = random.Random(42) + self._fallback_candidates = set(range(len(dataset))) + + def __len__(self): + return len(self._dataset) + + def __getitem__(self, idx): + retry_count = 0 + cur_idx = int(idx) + + while True: + data = self._map_func(self._dataset[cur_idx]) + if data is not None: + self._fallback_candidates.add(cur_idx) + return data + + # _map_func fails for this idx, use a random new index from the pool + retry_count += 1 + self._fallback_candidates.discard(cur_idx) + cur_idx = self._rng.sample(self._fallback_candidates, k=1)[0] + + if retry_count >= 3: + logger = logging.getLogger(__name__) + logger.warning( + "Failed to apply `_map_func` for idx: {}, retry count: {}".format( + idx, retry_count + ) + ) + + +class DatasetFromList(data.Dataset): + """ + Wrap a list to a torch Dataset. It produces elements of the list as data. + """ + + def __init__(self, lst: list, copy: bool = True, serialize: bool = True): + """ + Args: + lst (list): a list which contains elements to produce. + copy (bool): whether to deepcopy the element when producing it, + so that the result can be modified in place without affecting the + source in the list. + serialize (bool): whether to hold memory using serialized objects, when + enabled, data loader workers can use shared RAM from master + process instead of making a copy. 
+ """ + self._lst = lst + self._copy = copy + self._serialize = serialize + + def _serialize(data): + buffer = pickle.dumps(data, protocol=-1) + return np.frombuffer(buffer, dtype=np.uint8) + + if self._serialize: + logger = logging.getLogger(__name__) + logger.info( + "Serializing {} elements to byte tensors and concatenating them all ...".format( + len(self._lst) + ) + ) + self._lst = [_serialize(x) for x in self._lst] + self._addr = np.asarray([len(x) for x in self._lst], dtype=np.int64) + self._addr = np.cumsum(self._addr) + self._lst = np.concatenate(self._lst) + logger.info("Serialized dataset takes {:.2f} MiB".format(len(self._lst) / 1024 ** 2)) + + def __len__(self): + if self._serialize: + return len(self._addr) + else: + return len(self._lst) + + def __getitem__(self, idx): + if self._serialize: + start_addr = 0 if idx == 0 else self._addr[idx - 1].item() + end_addr = self._addr[idx].item() + bytes = memoryview(self._lst[start_addr:end_addr]) + return pickle.loads(bytes) + elif self._copy: + return copy.deepcopy(self._lst[idx]) + else: + return self._lst[idx] + + +class AspectRatioGroupedDataset(data.IterableDataset): + """ + Batch data that have similar aspect ratio together. + In this implementation, images whose aspect ratio < (or >) 1 will + be batched together. + This improves training speed because the images then need less padding + to form a batch. + + It assumes the underlying dataset produces dicts with "width" and "height" keys. + It will then produce a list of original dicts with length = batch_size, + all with similar aspect ratios. + """ + + def __init__(self, dataset, batch_size): + """ + Args: + dataset: an iterable. Each element must be a dict with keys + "width" and "height", which will be used to batch data. + batch_size (int): + """ + self.dataset = dataset + self.batch_size = batch_size + self._buckets = [[] for _ in range(2)] + # Hard-coded two aspect ratio groups: w > h and w < h. + # Can add support for more aspect ratio groups, but doesn't seem useful + + def __iter__(self): + for d in self.dataset: + w, h = d["width"], d["height"] + bucket_id = 0 if w > h else 1 + bucket = self._buckets[bucket_id] + bucket.append(d) + if len(bucket) == self.batch_size: + yield bucket[:] + del bucket[:] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/dataset_mapper.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/dataset_mapper.py new file mode 100644 index 0000000000000000000000000000000000000000..db73b378a6c2938a3beb700010a13172e6cc549f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/dataset_mapper.py @@ -0,0 +1,149 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import logging +import numpy as np +import torch +from fvcore.common.file_io import PathManager +from PIL import Image + +from . import detection_utils as utils +from . import transforms as T + +""" +This file contains the default mapping that's applied to "dataset dicts". +""" + +__all__ = ["DatasetMapper"] + + +class DatasetMapper: + """ + A callable which takes a dataset dict in Detectron2 Dataset format, + and map it into a format used by the model. + + This is the default callable to be used to map your dataset dict into training data. + You may need to follow it to implement your own one for customized logic, + such as a different way to read or transform images. + See :doc:`/tutorials/data_loading` for details. 
+ + The callable currently does the following: + + 1. Read the image from "file_name" + 2. Applies cropping/geometric transforms to the image and annotations + 3. Prepare data and annotations to Tensor and :class:`Instances` + """ + + def __init__(self, cfg, is_train=True): + if cfg.INPUT.CROP.ENABLED and is_train: + self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE) + logging.getLogger(__name__).info("CropGen used in training: " + str(self.crop_gen)) + else: + self.crop_gen = None + + self.tfm_gens = utils.build_transform_gen(cfg, is_train) + + # fmt: off + self.img_format = cfg.INPUT.FORMAT + self.mask_on = cfg.MODEL.MASK_ON + self.mask_format = cfg.INPUT.MASK_FORMAT + self.keypoint_on = cfg.MODEL.KEYPOINT_ON + self.load_proposals = cfg.MODEL.LOAD_PROPOSALS + # fmt: on + if self.keypoint_on and is_train: + # Flip only makes sense in training + self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN) + else: + self.keypoint_hflip_indices = None + + if self.load_proposals: + self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE + self.proposal_topk = ( + cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN + if is_train + else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST + ) + self.is_train = is_train + + def __call__(self, dataset_dict): + """ + Args: + dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format. + + Returns: + dict: a format that builtin models in detectron2 accept + """ + dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below + # USER: Write your own image loading if it's not from a file + image = utils.read_image(dataset_dict["file_name"], format=self.img_format) + utils.check_image_size(dataset_dict, image) + + if "annotations" not in dataset_dict: + image, transforms = T.apply_transform_gens( + ([self.crop_gen] if self.crop_gen else []) + self.tfm_gens, image + ) + else: + # Crop around an instance if there are instances in the image. + # USER: Remove if you don't use cropping + if self.crop_gen: + crop_tfm = utils.gen_crop_transform_with_instance( + self.crop_gen.get_crop_size(image.shape[:2]), + image.shape[:2], + np.random.choice(dataset_dict["annotations"]), + ) + image = crop_tfm.apply_image(image) + image, transforms = T.apply_transform_gens(self.tfm_gens, image) + if self.crop_gen: + transforms = crop_tfm + transforms + + image_shape = image.shape[:2] # h, w + + # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory, + # but not efficient on large generic data structures due to the use of pickle & mp.Queue. + # Therefore it's important to use torch.Tensor. + dataset_dict["image"] = torch.as_tensor(np.ascontiguousarray(image.transpose(2, 0, 1))) + + # USER: Remove if you don't use pre-computed proposals. + if self.load_proposals: + utils.transform_proposals( + dataset_dict, image_shape, transforms, self.min_box_side_len, self.proposal_topk + ) + + if not self.is_train: + # USER: Modify this if you want to keep them for some reason. + dataset_dict.pop("annotations", None) + dataset_dict.pop("sem_seg_file_name", None) + return dataset_dict + + if "annotations" in dataset_dict: + # USER: Modify this if you want to keep them for some reason. 
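Because `build_detection_train_loader` accepts any callable as `mapper`, a stripped-down replacement for this class can be as small as the sketch below. It is a hedged example: it assumes the vendored `transforms` module provides `Resize` and `apply_transform_gens` as in upstream detectron2, and it keeps no annotations.

```python
import copy
import torch
from detectron2.data import build_detection_train_loader
from detectron2.data import detection_utils as utils
from detectron2.data import transforms as T

def my_mapper(dataset_dict):
    # Minimal mapper: read the image, resize to a fixed size, keep no annotations.
    dataset_dict = copy.deepcopy(dataset_dict)
    image = utils.read_image(dataset_dict["file_name"], format="BGR")
    image, _ = T.apply_transform_gens([T.Resize((512, 512))], image)
    dataset_dict["image"] = torch.as_tensor(image.transpose(2, 0, 1).copy())
    return dataset_dict

# train_loader = build_detection_train_loader(cfg, mapper=my_mapper)  # cfg from get_cfg()
```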
+ for anno in dataset_dict["annotations"]: + if not self.mask_on: + anno.pop("segmentation", None) + if not self.keypoint_on: + anno.pop("keypoints", None) + + # USER: Implement additional transformations if you have other types of data + annos = [ + utils.transform_instance_annotations( + obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices + ) + for obj in dataset_dict.pop("annotations") + if obj.get("iscrowd", 0) == 0 + ] + instances = utils.annotations_to_instances( + annos, image_shape, mask_format=self.mask_format + ) + # Create a tight bounding box from masks, useful when image is cropped + if self.crop_gen and instances.has("gt_masks"): + instances.gt_boxes = instances.gt_masks.get_bounding_boxes() + dataset_dict["instances"] = utils.filter_empty_instances(instances) + + # USER: Remove if you don't do semantic/panoptic segmentation. + if "sem_seg_file_name" in dataset_dict: + with PathManager.open(dataset_dict.pop("sem_seg_file_name"), "rb") as f: + sem_seg_gt = Image.open(f) + sem_seg_gt = np.asarray(sem_seg_gt, dtype="uint8") + sem_seg_gt = transforms.apply_segmentation(sem_seg_gt) + sem_seg_gt = torch.as_tensor(sem_seg_gt.astype("long")) + dataset_dict["sem_seg"] = sem_seg_gt + return dataset_dict diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9fb3e4f7afec17137c95c78be6ef06d520ec8032 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/README.md @@ -0,0 +1,9 @@ + + +### Common Datasets + +The dataset implemented here do not need to load the data into the final format. +It should provide the minimal data structure needed to use the dataset, so it can be very efficient. + +For example, for an image dataset, just provide the file names and labels, but don't read the images. +Let the downstream decide how to read. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9c3f556bd201890fcca901d26efb5f9d8c3304f5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .cityscapes import load_cityscapes_instances +from .coco import load_coco_json, load_sem_seg +from .lvis import load_lvis_json, register_lvis_instances, get_lvis_instances_meta +from .register_coco import register_coco_instances, register_coco_panoptic_separated +from . 
import builtin # ensure the builtin data are registered + + +__all__ = [k for k in globals().keys() if "builtin" not in k and not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/builtin.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/builtin.py new file mode 100644 index 0000000000000000000000000000000000000000..21ac2228c56d59b38c9288fd720aab5fdc63ac0b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/builtin.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + + +""" +This file registers pre-defined data at hard-coded paths, and their metadata. + +We hard-code metadata for common data. This will enable: +1. Consistency check when loading the data +2. Use models on these standard data directly and run demos, + without having to download the dataset annotations + +We hard-code some paths to the dataset that's assumed to +exist in "./data/". + +Users SHOULD NOT use this file to create new dataset / metadata for new dataset. +To add new dataset, refer to the tutorial "docs/DATASETS.md". +""" + +import os + +from detectron2.data import DatasetCatalog, MetadataCatalog + +from .builtin_meta import _get_builtin_metadata +from .cityscapes import load_cityscapes_instances, load_cityscapes_semantic +from .lvis import get_lvis_instances_meta, register_lvis_instances +from .pascal_voc import register_pascal_voc +from .register_coco import register_coco_instances, register_coco_panoptic_separated + +# ==== Predefined data and splits for COCO ========== + +_PREDEFINED_SPLITS_COCO = {} +_PREDEFINED_SPLITS_COCO["coco"] = { + "coco_2014_train": ("coco/train2014", "coco/annotations/instances_train2014.json"), + "coco_2014_val": ("coco/val2014", "coco/annotations/instances_val2014.json"), + "coco_2014_minival": ("coco/val2014", "coco/annotations/instances_minival2014.json"), + "coco_2014_minival_100": ("coco/val2014", "coco/annotations/instances_minival2014_100.json"), + "coco_2014_valminusminival": ( + "coco/val2014", + "coco/annotations/instances_valminusminival2014.json", + ), + "coco_2017_train": ("coco/train2017", "coco/annotations/instances_train2017.json"), + "coco_2017_val": ("coco/val2017", "coco/annotations/instances_val2017.json"), + "coco_2017_test": ("coco/test2017", "coco/annotations/image_info_test2017.json"), + "coco_2017_test-dev": ("coco/test2017", "coco/annotations/image_info_test-dev2017.json"), + "coco_2017_val_100": ("coco/val2017", "coco/annotations/instances_val2017_100.json"), +} + +_PREDEFINED_SPLITS_COCO["coco_person"] = { + "keypoints_coco_2014_train": ( + "coco/train2014", + "coco/annotations/person_keypoints_train2014.json", + ), + "keypoints_coco_2014_val": ("coco/val2014", "coco/annotations/person_keypoints_val2014.json"), + "keypoints_coco_2014_minival": ( + "coco/val2014", + "coco/annotations/person_keypoints_minival2014.json", + ), + "keypoints_coco_2014_valminusminival": ( + "coco/val2014", + "coco/annotations/person_keypoints_valminusminival2014.json", + ), + "keypoints_coco_2014_minival_100": ( + "coco/val2014", + "coco/annotations/person_keypoints_minival2014_100.json", + ), + "keypoints_coco_2017_train": ( + "coco/train2017", + "coco/annotations/person_keypoints_train2017.json", + ), + "keypoints_coco_2017_val": ("coco/val2017", "coco/annotations/person_keypoints_val2017.json"), + "keypoints_coco_2017_val_100": ( + 
"coco/val2017", + "coco/annotations/person_keypoints_val2017_100.json", + ), +} + + +_PREDEFINED_SPLITS_COCO_PANOPTIC = { + "coco_2017_train_panoptic": ( + # This is the original panoptic annotation directory + "coco/panoptic_train2017", + "coco/annotations/panoptic_train2017.json", + # This directory contains semantic annotations that are + # converted from panoptic annotations. + # It is used by PanopticFPN. + # You can use the script at detectron2/data/prepare_panoptic_fpn.py + # to create these directories. + "coco/panoptic_stuff_train2017", + ), + "coco_2017_val_panoptic": ( + "coco/panoptic_val2017", + "coco/annotations/panoptic_val2017.json", + "coco/panoptic_stuff_val2017", + ), + "coco_2017_val_100_panoptic": ( + "coco/panoptic_val2017_100", + "coco/annotations/panoptic_val2017_100.json", + "coco/panoptic_stuff_val2017_100", + ), +} + + +def register_all_coco(root): + for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_COCO.items(): + for key, (image_root, json_file) in splits_per_dataset.items(): + # Assume pre-defined data live in `./data`. + register_coco_instances( + key, + _get_builtin_metadata(dataset_name), + os.path.join(root, json_file) if "://" not in json_file else json_file, + os.path.join(root, image_root), + ) + + for ( + prefix, + (panoptic_root, panoptic_json, semantic_root), + ) in _PREDEFINED_SPLITS_COCO_PANOPTIC.items(): + prefix_instances = prefix[: -len("_panoptic")] + instances_meta = MetadataCatalog.get(prefix_instances) + image_root, instances_json = instances_meta.image_root, instances_meta.json_file + register_coco_panoptic_separated( + prefix, + _get_builtin_metadata("coco_panoptic_separated"), + image_root, + os.path.join(root, panoptic_root), + os.path.join(root, panoptic_json), + os.path.join(root, semantic_root), + instances_json, + ) + + +# ==== Predefined data and splits for LVIS ========== + + +_PREDEFINED_SPLITS_LVIS = { + "lvis_v0.5": { + "lvis_v0.5_train": ("coco/train2017", "lvis/lvis_v0.5_train.json"), + "lvis_v0.5_val": ("coco/val2017", "lvis/lvis_v0.5_val.json"), + "lvis_v0.5_val_rand_100": ("coco/val2017", "lvis/lvis_v0.5_val_rand_100.json"), + "lvis_v0.5_test": ("coco/test2017", "lvis/lvis_v0.5_image_info_test.json"), + }, + "lvis_v0.5_cocofied": { + "lvis_v0.5_train_cocofied": ("coco/train2017", "lvis/lvis_v0.5_train_cocofied.json"), + "lvis_v0.5_val_cocofied": ("coco/val2017", "lvis/lvis_v0.5_val_cocofied.json"), + }, +} + + +def register_all_lvis(root): + for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_LVIS.items(): + for key, (image_root, json_file) in splits_per_dataset.items(): + # Assume pre-defined data live in `./data`. 
+ register_lvis_instances( + key, + get_lvis_instances_meta(dataset_name), + os.path.join(root, json_file) if "://" not in json_file else json_file, + os.path.join(root, image_root), + ) + + +# ==== Predefined splits for raw cityscapes images =========== + + +_RAW_CITYSCAPES_SPLITS = { + "cityscapes_fine_{task}_train": ("cityscapes/leftImg8bit/train", "cityscapes/gtFine/train"), + "cityscapes_fine_{task}_val": ("cityscapes/leftImg8bit/val", "cityscapes/gtFine/val"), + "cityscapes_fine_{task}_test": ("cityscapes/leftImg8bit/test", "cityscapes/gtFine/test"), +} + + +def register_all_cityscapes(root): + for key, (image_dir, gt_dir) in _RAW_CITYSCAPES_SPLITS.items(): + meta = _get_builtin_metadata("cityscapes") + image_dir = os.path.join(root, image_dir) + gt_dir = os.path.join(root, gt_dir) + + inst_key = key.format(task="instance_seg") + DatasetCatalog.register( + inst_key, + lambda x=image_dir, y=gt_dir: load_cityscapes_instances( + x, y, from_json=True, to_polygons=True + ), + ) + MetadataCatalog.get(inst_key).set( + image_dir=image_dir, gt_dir=gt_dir, evaluator_type="cityscapes_instance", **meta + ) + + sem_key = key.format(task="sem_seg") + DatasetCatalog.register( + sem_key, lambda x=image_dir, y=gt_dir: load_cityscapes_semantic(x, y) + ) + MetadataCatalog.get(sem_key).set( + image_dir=image_dir, gt_dir=gt_dir, evaluator_type="cityscapes_sem_seg", **meta + ) + + +# ==== Predefined splits for PASCAL VOC =========== +def register_all_pascal_voc(root): + SPLITS = [ + ("voc_2007_trainval", "VOC2007", "trainval"), + ("voc_2007_train", "VOC2007", "train"), + ("voc_2007_val", "VOC2007", "val"), + ("voc_2007_test", "VOC2007", "test"), + ("voc_2012_trainval", "VOC2012", "trainval"), + ("voc_2012_train", "VOC2012", "train"), + ("voc_2012_val", "VOC2012", "val"), + ] + for name, dirname, split in SPLITS: + year = 2007 if "2007" in name else 2012 + register_pascal_voc(name, os.path.join(root, dirname), split, year) + MetadataCatalog.get(name).evaluator_type = "pascal_voc" + + +# Register them all under "./data" +_root = os.getenv("DETECTRON2_DATASETS", "data") +register_all_coco(_root) +register_all_lvis(_root) +register_all_cityscapes(_root) +register_all_pascal_voc(_root) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/builtin_meta.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/builtin_meta.py new file mode 100644 index 0000000000000000000000000000000000000000..74c79863a9d1ef5df9b5ce64f97d6be8e4e37d59 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/builtin_meta.py @@ -0,0 +1,267 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + + +# All coco categories, together with their nice-looking visualization colors +# It's from https://github.com/cocodataset/panopticapi/blob/master/panoptic_coco_categories.json +COCO_CATEGORIES = [ + {"color": [220, 20, 60], "isthing": 1, "id": 1, "name": "person"}, + {"color": [119, 11, 32], "isthing": 1, "id": 2, "name": "bicycle"}, + {"color": [0, 0, 142], "isthing": 1, "id": 3, "name": "car"}, + {"color": [0, 0, 230], "isthing": 1, "id": 4, "name": "motorcycle"}, + {"color": [106, 0, 228], "isthing": 1, "id": 5, "name": "airplane"}, + {"color": [0, 60, 100], "isthing": 1, "id": 6, "name": "bus"}, + {"color": [0, 80, 100], "isthing": 1, "id": 7, "name": "train"}, + {"color": [0, 0, 70], "isthing": 1, "id": 8, "name": "truck"}, + {"color": [0, 0, 192], "isthing": 1, "id": 9, "name": "boat"}, + {"color": [250, 170, 30], "isthing": 1, "id": 10, "name": "traffic light"}, + {"color": [100, 170, 30], "isthing": 1, "id": 11, "name": "fire hydrant"}, + {"color": [220, 220, 0], "isthing": 1, "id": 13, "name": "stop sign"}, + {"color": [175, 116, 175], "isthing": 1, "id": 14, "name": "parking meter"}, + {"color": [250, 0, 30], "isthing": 1, "id": 15, "name": "bench"}, + {"color": [165, 42, 42], "isthing": 1, "id": 16, "name": "bird"}, + {"color": [255, 77, 255], "isthing": 1, "id": 17, "name": "cat"}, + {"color": [0, 226, 252], "isthing": 1, "id": 18, "name": "dog"}, + {"color": [182, 182, 255], "isthing": 1, "id": 19, "name": "horse"}, + {"color": [0, 82, 0], "isthing": 1, "id": 20, "name": "sheep"}, + {"color": [120, 166, 157], "isthing": 1, "id": 21, "name": "cow"}, + {"color": [110, 76, 0], "isthing": 1, "id": 22, "name": "elephant"}, + {"color": [174, 57, 255], "isthing": 1, "id": 23, "name": "bear"}, + {"color": [199, 100, 0], "isthing": 1, "id": 24, "name": "zebra"}, + {"color": [72, 0, 118], "isthing": 1, "id": 25, "name": "giraffe"}, + {"color": [255, 179, 240], "isthing": 1, "id": 27, "name": "backpack"}, + {"color": [0, 125, 92], "isthing": 1, "id": 28, "name": "umbrella"}, + {"color": [209, 0, 151], "isthing": 1, "id": 31, "name": "handbag"}, + {"color": [188, 208, 182], "isthing": 1, "id": 32, "name": "tie"}, + {"color": [0, 220, 176], "isthing": 1, "id": 33, "name": "suitcase"}, + {"color": [255, 99, 164], "isthing": 1, "id": 34, "name": "frisbee"}, + {"color": [92, 0, 73], "isthing": 1, "id": 35, "name": "skis"}, + {"color": [133, 129, 255], "isthing": 1, "id": 36, "name": "snowboard"}, + {"color": [78, 180, 255], "isthing": 1, "id": 37, "name": "sports ball"}, + {"color": [0, 228, 0], "isthing": 1, "id": 38, "name": "kite"}, + {"color": [174, 255, 243], "isthing": 1, "id": 39, "name": "baseball bat"}, + {"color": [45, 89, 255], "isthing": 1, "id": 40, "name": "baseball glove"}, + {"color": [134, 134, 103], "isthing": 1, "id": 41, "name": "skateboard"}, + {"color": [145, 148, 174], "isthing": 1, "id": 42, "name": "surfboard"}, + {"color": [255, 208, 186], "isthing": 1, "id": 43, "name": "tennis racket"}, + {"color": [197, 226, 255], "isthing": 1, "id": 44, "name": "bottle"}, + {"color": [171, 134, 1], "isthing": 1, "id": 46, "name": "wine glass"}, + {"color": [109, 63, 54], "isthing": 1, "id": 47, "name": "cup"}, + {"color": [207, 138, 255], "isthing": 1, "id": 48, "name": "fork"}, + {"color": [151, 0, 95], "isthing": 1, "id": 49, "name": "knife"}, + {"color": [9, 80, 61], "isthing": 1, "id": 50, "name": "spoon"}, + {"color": [84, 105, 51], "isthing": 1, "id": 51, "name": "bowl"}, + {"color": [74, 65, 105], "isthing": 1, "id": 52, "name": "banana"}, + 
{"color": [166, 196, 102], "isthing": 1, "id": 53, "name": "apple"}, + {"color": [208, 195, 210], "isthing": 1, "id": 54, "name": "sandwich"}, + {"color": [255, 109, 65], "isthing": 1, "id": 55, "name": "orange"}, + {"color": [0, 143, 149], "isthing": 1, "id": 56, "name": "broccoli"}, + {"color": [179, 0, 194], "isthing": 1, "id": 57, "name": "carrot"}, + {"color": [209, 99, 106], "isthing": 1, "id": 58, "name": "hot dog"}, + {"color": [5, 121, 0], "isthing": 1, "id": 59, "name": "pizza"}, + {"color": [227, 255, 205], "isthing": 1, "id": 60, "name": "donut"}, + {"color": [147, 186, 208], "isthing": 1, "id": 61, "name": "cake"}, + {"color": [153, 69, 1], "isthing": 1, "id": 62, "name": "chair"}, + {"color": [3, 95, 161], "isthing": 1, "id": 63, "name": "couch"}, + {"color": [163, 255, 0], "isthing": 1, "id": 64, "name": "potted plant"}, + {"color": [119, 0, 170], "isthing": 1, "id": 65, "name": "bed"}, + {"color": [0, 182, 199], "isthing": 1, "id": 67, "name": "dining table"}, + {"color": [0, 165, 120], "isthing": 1, "id": 70, "name": "toilet"}, + {"color": [183, 130, 88], "isthing": 1, "id": 72, "name": "tv"}, + {"color": [95, 32, 0], "isthing": 1, "id": 73, "name": "laptop"}, + {"color": [130, 114, 135], "isthing": 1, "id": 74, "name": "mouse"}, + {"color": [110, 129, 133], "isthing": 1, "id": 75, "name": "remote"}, + {"color": [166, 74, 118], "isthing": 1, "id": 76, "name": "keyboard"}, + {"color": [219, 142, 185], "isthing": 1, "id": 77, "name": "cell phone"}, + {"color": [79, 210, 114], "isthing": 1, "id": 78, "name": "microwave"}, + {"color": [178, 90, 62], "isthing": 1, "id": 79, "name": "oven"}, + {"color": [65, 70, 15], "isthing": 1, "id": 80, "name": "toaster"}, + {"color": [127, 167, 115], "isthing": 1, "id": 81, "name": "sink"}, + {"color": [59, 105, 106], "isthing": 1, "id": 82, "name": "refrigerator"}, + {"color": [142, 108, 45], "isthing": 1, "id": 84, "name": "book"}, + {"color": [196, 172, 0], "isthing": 1, "id": 85, "name": "clock"}, + {"color": [95, 54, 80], "isthing": 1, "id": 86, "name": "vase"}, + {"color": [128, 76, 255], "isthing": 1, "id": 87, "name": "scissors"}, + {"color": [201, 57, 1], "isthing": 1, "id": 88, "name": "teddy bear"}, + {"color": [246, 0, 122], "isthing": 1, "id": 89, "name": "hair drier"}, + {"color": [191, 162, 208], "isthing": 1, "id": 90, "name": "toothbrush"}, + {"color": [255, 255, 128], "isthing": 0, "id": 92, "name": "banner"}, + {"color": [147, 211, 203], "isthing": 0, "id": 93, "name": "blanket"}, + {"color": [150, 100, 100], "isthing": 0, "id": 95, "name": "bridge"}, + {"color": [168, 171, 172], "isthing": 0, "id": 100, "name": "cardboard"}, + {"color": [146, 112, 198], "isthing": 0, "id": 107, "name": "counter"}, + {"color": [210, 170, 100], "isthing": 0, "id": 109, "name": "curtain"}, + {"color": [92, 136, 89], "isthing": 0, "id": 112, "name": "door-stuff"}, + {"color": [218, 88, 184], "isthing": 0, "id": 118, "name": "floor-wood"}, + {"color": [241, 129, 0], "isthing": 0, "id": 119, "name": "flower"}, + {"color": [217, 17, 255], "isthing": 0, "id": 122, "name": "fruit"}, + {"color": [124, 74, 181], "isthing": 0, "id": 125, "name": "gravel"}, + {"color": [70, 70, 70], "isthing": 0, "id": 128, "name": "house"}, + {"color": [255, 228, 255], "isthing": 0, "id": 130, "name": "light"}, + {"color": [154, 208, 0], "isthing": 0, "id": 133, "name": "mirror-stuff"}, + {"color": [193, 0, 92], "isthing": 0, "id": 138, "name": "net"}, + {"color": [76, 91, 113], "isthing": 0, "id": 141, "name": "pillow"}, + {"color": [255, 180, 195], "isthing": 0, 
"id": 144, "name": "platform"}, + {"color": [106, 154, 176], "isthing": 0, "id": 145, "name": "playingfield"}, + {"color": [230, 150, 140], "isthing": 0, "id": 147, "name": "railroad"}, + {"color": [60, 143, 255], "isthing": 0, "id": 148, "name": "river"}, + {"color": [128, 64, 128], "isthing": 0, "id": 149, "name": "road"}, + {"color": [92, 82, 55], "isthing": 0, "id": 151, "name": "roof"}, + {"color": [254, 212, 124], "isthing": 0, "id": 154, "name": "sand"}, + {"color": [73, 77, 174], "isthing": 0, "id": 155, "name": "sea"}, + {"color": [255, 160, 98], "isthing": 0, "id": 156, "name": "shelf"}, + {"color": [255, 255, 255], "isthing": 0, "id": 159, "name": "snow"}, + {"color": [104, 84, 109], "isthing": 0, "id": 161, "name": "stairs"}, + {"color": [169, 164, 131], "isthing": 0, "id": 166, "name": "tent"}, + {"color": [225, 199, 255], "isthing": 0, "id": 168, "name": "towel"}, + {"color": [137, 54, 74], "isthing": 0, "id": 171, "name": "wall-brick"}, + {"color": [135, 158, 223], "isthing": 0, "id": 175, "name": "wall-stone"}, + {"color": [7, 246, 231], "isthing": 0, "id": 176, "name": "wall-tile"}, + {"color": [107, 255, 200], "isthing": 0, "id": 177, "name": "wall-wood"}, + {"color": [58, 41, 149], "isthing": 0, "id": 178, "name": "water-other"}, + {"color": [183, 121, 142], "isthing": 0, "id": 180, "name": "window-blind"}, + {"color": [255, 73, 97], "isthing": 0, "id": 181, "name": "window-other"}, + {"color": [107, 142, 35], "isthing": 0, "id": 184, "name": "tree-merged"}, + {"color": [190, 153, 153], "isthing": 0, "id": 185, "name": "fence-merged"}, + {"color": [146, 139, 141], "isthing": 0, "id": 186, "name": "ceiling-merged"}, + {"color": [70, 130, 180], "isthing": 0, "id": 187, "name": "sky-other-merged"}, + {"color": [134, 199, 156], "isthing": 0, "id": 188, "name": "cabinet-merged"}, + {"color": [209, 226, 140], "isthing": 0, "id": 189, "name": "table-merged"}, + {"color": [96, 36, 108], "isthing": 0, "id": 190, "name": "floor-other-merged"}, + {"color": [96, 96, 96], "isthing": 0, "id": 191, "name": "pavement-merged"}, + {"color": [64, 170, 64], "isthing": 0, "id": 192, "name": "mountain-merged"}, + {"color": [152, 251, 152], "isthing": 0, "id": 193, "name": "grass-merged"}, + {"color": [208, 229, 228], "isthing": 0, "id": 194, "name": "dirt-merged"}, + {"color": [206, 186, 171], "isthing": 0, "id": 195, "name": "paper-merged"}, + {"color": [152, 161, 64], "isthing": 0, "id": 196, "name": "food-other-merged"}, + {"color": [116, 112, 0], "isthing": 0, "id": 197, "name": "building-other-merged"}, + {"color": [0, 114, 143], "isthing": 0, "id": 198, "name": "rock-merged"}, + {"color": [102, 102, 156], "isthing": 0, "id": 199, "name": "wall-other-merged"}, + {"color": [250, 141, 255], "isthing": 0, "id": 200, "name": "rug-merged"}, +] + +# fmt: off +COCO_PERSON_KEYPOINT_NAMES = ( + "nose", + "left_eye", "right_eye", + "left_ear", "right_ear", + "left_shoulder", "right_shoulder", + "left_elbow", "right_elbow", + "left_wrist", "right_wrist", + "left_hip", "right_hip", + "left_knee", "right_knee", + "left_ankle", "right_ankle", +) +# fmt: on + +# Pairs of keypoints that should be exchanged under horizontal flipping +COCO_PERSON_KEYPOINT_FLIP_MAP = ( + ("left_eye", "right_eye"), + ("left_ear", "right_ear"), + ("left_shoulder", "right_shoulder"), + ("left_elbow", "right_elbow"), + ("left_wrist", "right_wrist"), + ("left_hip", "right_hip"), + ("left_knee", "right_knee"), + ("left_ankle", "right_ankle"), +) + +# rules for pairs of keypoints to draw a line between, and the line color to use. 
+KEYPOINT_CONNECTION_RULES = [ + # face + ("left_ear", "left_eye", (102, 204, 255)), + ("right_ear", "right_eye", (51, 153, 255)), + ("left_eye", "nose", (102, 0, 204)), + ("nose", "right_eye", (51, 102, 255)), + # upper-body + ("left_shoulder", "right_shoulder", (255, 128, 0)), + ("left_shoulder", "left_elbow", (153, 255, 204)), + ("right_shoulder", "right_elbow", (128, 229, 255)), + ("left_elbow", "left_wrist", (153, 255, 153)), + ("right_elbow", "right_wrist", (102, 255, 224)), + # lower-body + ("left_hip", "right_hip", (255, 102, 0)), + ("left_hip", "left_knee", (255, 255, 77)), + ("right_hip", "right_knee", (153, 255, 204)), + ("left_knee", "left_ankle", (191, 255, 128)), + ("right_knee", "right_ankle", (255, 195, 77)), +] + + +def _get_coco_instances_meta(): + thing_ids = [k["id"] for k in COCO_CATEGORIES if k["isthing"] == 1] + thing_colors = [k["color"] for k in COCO_CATEGORIES if k["isthing"] == 1] + assert len(thing_ids) == 80, len(thing_ids) + # Mapping from the incontiguous COCO category id to an id in [0, 79] + thing_dataset_id_to_contiguous_id = {k: i for i, k in enumerate(thing_ids)} + thing_classes = [k["name"] for k in COCO_CATEGORIES if k["isthing"] == 1] + ret = { + "thing_dataset_id_to_contiguous_id": thing_dataset_id_to_contiguous_id, + "thing_classes": thing_classes, + "thing_colors": thing_colors, + } + return ret + + +def _get_coco_panoptic_separated_meta(): + """ + Returns metadata for "separated" version of the panoptic segmentation dataset. + """ + stuff_ids = [k["id"] for k in COCO_CATEGORIES if k["isthing"] == 0] + assert len(stuff_ids) == 53, len(stuff_ids) + + # For semantic segmentation, this mapping maps from contiguous stuff id + # (in [0, 53], used in models) to ids in the dataset (used for processing results) + # The id 0 is mapped to an extra category "thing". 
+ stuff_dataset_id_to_contiguous_id = {k: i + 1 for i, k in enumerate(stuff_ids)} + # When converting COCO panoptic annotations to semantic annotations + # We label the "thing" category to 0 + stuff_dataset_id_to_contiguous_id[0] = 0 + + # 54 names for COCO stuff categories (including "things") + stuff_classes = ["things"] + [ + k["name"].replace("-other", "").replace("-merged", "") + for k in COCO_CATEGORIES + if k["isthing"] == 0 + ] + + # NOTE: I randomly picked a color for things + stuff_colors = [[82, 18, 128]] + [k["color"] for k in COCO_CATEGORIES if k["isthing"] == 0] + ret = { + "stuff_dataset_id_to_contiguous_id": stuff_dataset_id_to_contiguous_id, + "stuff_classes": stuff_classes, + "stuff_colors": stuff_colors, + } + ret.update(_get_coco_instances_meta()) + return ret + + +def _get_builtin_metadata(dataset_name): + if dataset_name == "coco": + return _get_coco_instances_meta() + if dataset_name == "coco_panoptic_separated": + return _get_coco_panoptic_separated_meta() + elif dataset_name == "coco_person": + return { + "thing_classes": ["person"], + "keypoint_names": COCO_PERSON_KEYPOINT_NAMES, + "keypoint_flip_map": COCO_PERSON_KEYPOINT_FLIP_MAP, + "keypoint_connection_rules": KEYPOINT_CONNECTION_RULES, + } + elif dataset_name == "cityscapes": + # fmt: off + CITYSCAPES_THING_CLASSES = [ + "person", "rider", "car", "truck", + "bus", "train", "motorcycle", "bicycle", + ] + CITYSCAPES_STUFF_CLASSES = [ + "road", "sidewalk", "building", "wall", "fence", "pole", "traffic light", + "traffic sign", "vegetation", "terrain", "sky", "person", "rider", "car", + "truck", "bus", "train", "motorcycle", "bicycle", "license plate", + ] + # fmt: on + return { + "thing_classes": CITYSCAPES_THING_CLASSES, + "stuff_classes": CITYSCAPES_STUFF_CLASSES, + } + raise KeyError("No built-in metadata for dataset {}".format(dataset_name)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/cityscapes.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..062a555b959582eca525087ffc9859d298e926b8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/cityscapes.py @@ -0,0 +1,329 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import functools +import json +import logging +import multiprocessing as mp +import numpy as np +import os +from itertools import chain +import pycocotools.mask as mask_util +from fvcore.common.file_io import PathManager +from PIL import Image + +from detectron2.structures import BoxMode +from detectron2.utils.comm import get_world_size +from detectron2.utils.logger import setup_logger + +try: + import cv2 # noqa +except ImportError: + # OpenCV is an optional dependency at the moment + pass + + +logger = logging.getLogger(__name__) + + +def get_cityscapes_files(image_dir, gt_dir): + files = [] + # scan through the directory + cities = PathManager.ls(image_dir) + logger.info(f"{len(cities)} cities found in '{image_dir}'.") + for city in cities: + city_img_dir = os.path.join(image_dir, city) + city_gt_dir = os.path.join(gt_dir, city) + for basename in PathManager.ls(city_img_dir): + image_file = os.path.join(city_img_dir, basename) + + suffix = "leftImg8bit.png" + assert basename.endswith(suffix) + basename = basename[: -len(suffix)] + + instance_file = os.path.join(city_gt_dir, basename + "gtFine_instanceIds.png") + label_file = os.path.join(city_gt_dir, basename + "gtFine_labelIds.png") + json_file = os.path.join(city_gt_dir, basename + "gtFine_polygons.json") + + files.append((image_file, instance_file, label_file, json_file)) + assert len(files), "No images found in {}".format(image_dir) + for f in files[0]: + assert PathManager.isfile(f), f + return files + + +def load_cityscapes_instances(image_dir, gt_dir, from_json=True, to_polygons=True): + """ + Args: + image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train". + gt_dir (str): path to the raw annotations. e.g., "~/cityscapes/gtFine/train". + from_json (bool): whether to read annotations from the raw json file or the png files. + to_polygons (bool): whether to represent the segmentation as polygons + (COCO's format) instead of masks (cityscapes's format). + + Returns: + list[dict]: a list of dicts in Detectron2 standard format. (See + `Using Custom Datasets `_ ) + """ + if from_json: + assert to_polygons, ( + "Cityscapes's json annotations are in polygon format. " + "Converting to mask format is not supported now." + ) + files = get_cityscapes_files(image_dir, gt_dir) + + logger.info("Preprocessing cityscapes annotations ...") + # This is still not fast: all workers will execute duplicate works and will + # take up to 10m on a 8GPU server. + pool = mp.Pool(processes=max(mp.cpu_count() // get_world_size() // 2, 4)) + + ret = pool.map( + functools.partial(cityscapes_files_to_dict, from_json=from_json, to_polygons=to_polygons), + files, + ) + logger.info("Loaded {} images from {}".format(len(ret), image_dir)) + + # Map cityscape ids to contiguous ids + from cityscapesscripts.helpers.labels import labels + + labels = [l for l in labels if l.hasInstances and not l.ignoreInEval] + dataset_id_to_contiguous_id = {l.id: idx for idx, l in enumerate(labels)} + for dict_per_image in ret: + for anno in dict_per_image["annotations"]: + anno["category_id"] = dataset_id_to_contiguous_id[anno["category_id"]] + return ret + + +def load_cityscapes_semantic(image_dir, gt_dir): + """ + Args: + image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train". + gt_dir (str): path to the raw annotations. e.g., "~/cityscapes/gtFine/train". + + Returns: + list[dict]: a list of dict, each has "file_name" and + "sem_seg_file_name". + """ + ret = [] + # gt_dir is small and contain many small files. 
It makes sense to fetch it to local disk first + gt_dir = PathManager.get_local_path(gt_dir) + for image_file, _, label_file, json_file in get_cityscapes_files(image_dir, gt_dir): + label_file = label_file.replace("labelIds", "labelTrainIds") + + with PathManager.open(json_file, "r") as f: + jsonobj = json.load(f) + ret.append( + { + "file_name": image_file, + "sem_seg_file_name": label_file, + "height": jsonobj["imgHeight"], + "width": jsonobj["imgWidth"], + } + ) + assert len(ret), f"No images found in {image_dir}!" + assert PathManager.isfile( + ret[0]["sem_seg_file_name"] + ), "Please generate labelTrainIds.png with cityscapesscripts/preparation/createTrainIdLabelImgs.py" # noqa + return ret + + +def cityscapes_files_to_dict(files, from_json, to_polygons): + """ + Parse cityscapes annotation files to an instance segmentation dataset dict. + + Args: + files (tuple): consists of (image_file, instance_id_file, label_id_file, json_file) + from_json (bool): whether to read annotations from the raw json file or the png files. + to_polygons (bool): whether to represent the segmentation as polygons + (COCO's format) instead of masks (cityscapes's format). + + Returns: + A dict in Detectron2 Dataset format. + """ + from cityscapesscripts.helpers.labels import id2label, name2label + + image_file, instance_id_file, _, json_file = files + + annos = [] + + if from_json: + from shapely.geometry import MultiPolygon, Polygon + + with PathManager.open(json_file, "r") as f: + jsonobj = json.load(f) + ret = { + "file_name": image_file, + "image_id": os.path.basename(image_file), + "height": jsonobj["imgHeight"], + "width": jsonobj["imgWidth"], + } + + # `polygons_union` contains the union of all valid polygons. + polygons_union = Polygon() + + # CityscapesScripts draw the polygons in sequential order + # and each polygon *overwrites* existing ones. See + # (https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/preparation/json2instanceImg.py) # noqa + # We use reverse order, and each polygon *avoids* early ones. + # This will resolve the polygon overlaps in the same way as CityscapesScripts. + for obj in jsonobj["objects"][::-1]: + if "deleted" in obj: # cityscapes data format specific + continue + label_name = obj["label"] + + try: + label = name2label[label_name] + except KeyError: + if label_name.endswith("group"): # crowd area + label = name2label[label_name[: -len("group")]] + else: + raise + if label.id < 0: # cityscapes data format + continue + + # Cityscapes's raw annotations use integer coordinates + # Therefore +0.5 here + poly_coord = np.asarray(obj["polygon"], dtype="f4") + 0.5 + # CityscapesScript uses PIL.ImageDraw.polygon to rasterize + # polygons for evaluation. This function operates in integer space + # and draws each pixel whose center falls into the polygon. + # Therefore it draws a polygon which is 0.5 "fatter" in expectation. + # We therefore dilate the input polygon by 0.5 as our input.
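+            # (Illustration with assumed numbers, not taken from the dataset:) buffering a 1x1
+            # axis-aligned square by 0.5 grows its area to 1 + 4*0.5 + pi*0.25 ~= 3.79, i.e. every
+            # edge moves outward by half a pixel with slightly rounded corners; `resolution=4`
+            # controls how many segments approximate each rounded corner.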
+ poly = Polygon(poly_coord).buffer(0.5, resolution=4) + + if not label.hasInstances or label.ignoreInEval: + # even if we won't store the polygon it still contributes to overlaps resolution + polygons_union = polygons_union.union(poly) + continue + + # Take non-overlapping part of the polygon + poly_wo_overlaps = poly.difference(polygons_union) + if poly_wo_overlaps.is_empty: + continue + polygons_union = polygons_union.union(poly) + + anno = {} + anno["iscrowd"] = label_name.endswith("group") + anno["category_id"] = label.id + + if isinstance(poly_wo_overlaps, Polygon): + poly_list = [poly_wo_overlaps] + elif isinstance(poly_wo_overlaps, MultiPolygon): + poly_list = poly_wo_overlaps.geoms + else: + raise NotImplementedError("Unknown geometric structure {}".format(poly_wo_overlaps)) + + poly_coord = [] + for poly_el in poly_list: + # COCO API can work only with exterior boundaries now, hence we store only them. + # TODO: store both exterior and interior boundaries once other parts of the + # codebase support holes in polygons. + poly_coord.append(list(chain(*poly_el.exterior.coords))) + anno["segmentation"] = poly_coord + (xmin, ymin, xmax, ymax) = poly_wo_overlaps.bounds + + anno["bbox"] = (xmin, ymin, xmax, ymax) + anno["bbox_mode"] = BoxMode.XYXY_ABS + + annos.append(anno) + else: + # See also the official annotation parsing scripts at + # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/instances2dict.py # noqa + with PathManager.open(instance_id_file, "rb") as f: + inst_image = np.asarray(Image.open(f), order="F") + # ids < 24 are stuff labels (filtering them first is about 5% faster) + flattened_ids = np.unique(inst_image[inst_image >= 24]) + + ret = { + "file_name": image_file, + "image_id": os.path.basename(image_file), + "height": inst_image.shape[0], + "width": inst_image.shape[1], + } + + for instance_id in flattened_ids: + # For non-crowd annotations, instance_id // 1000 is the label_id + # Crowd annotations have <1000 instance ids + label_id = instance_id // 1000 if instance_id >= 1000 else instance_id + label = id2label[label_id] + if not label.hasInstances or label.ignoreInEval: + continue + + anno = {} + anno["iscrowd"] = instance_id < 1000 + anno["category_id"] = label.id + + mask = np.asarray(inst_image == instance_id, dtype=np.uint8, order="F") + + inds = np.nonzero(mask) + ymin, ymax = inds[0].min(), inds[0].max() + xmin, xmax = inds[1].min(), inds[1].max() + anno["bbox"] = (xmin, ymin, xmax, ymax) + if xmax <= xmin or ymax <= ymin: + continue + anno["bbox_mode"] = BoxMode.XYXY_ABS + if to_polygons: + # This conversion comes from D4809743 and D5171122, + # when Mask-RCNN was first developed. + contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[ + -2 + ] + polygons = [c.reshape(-1).tolist() for c in contours if len(c) >= 3] + # opencv's can produce invalid polygons + if len(polygons) == 0: + continue + anno["segmentation"] = polygons + else: + anno["segmentation"] = mask_util.encode(mask[:, :, None])[0] + annos.append(anno) + ret["annotations"] = annos + return ret + + +if __name__ == "__main__": + """ + Test the cityscapes dataset loader. 
+ + Usage: + python -m detectron2.data.data.cityscapes \ + cityscapes/leftImg8bit/train cityscapes/gtFine/train + """ + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("image_dir") + parser.add_argument("gt_dir") + parser.add_argument("--type", choices=["instance", "semantic"], default="instance") + args = parser.parse_args() + from detectron2.data.catalog import Metadata + from detectron2.utils.visualizer import Visualizer + from cityscapesscripts.helpers.labels import labels + + logger = setup_logger(name=__name__) + + dirname = "cityscapes-data-vis" + os.makedirs(dirname, exist_ok=True) + + if args.type == "instance": + dicts = load_cityscapes_instances( + args.image_dir, args.gt_dir, from_json=True, to_polygons=True + ) + logger.info("Done loading {} samples.".format(len(dicts))) + + thing_classes = [k.name for k in labels if k.hasInstances and not k.ignoreInEval] + meta = Metadata().set(thing_classes=thing_classes) + + else: + dicts = load_cityscapes_semantic(args.image_dir, args.gt_dir) + logger.info("Done loading {} samples.".format(len(dicts))) + + stuff_names = [k.name for k in labels if k.trainId != 255] + stuff_colors = [k.color for k in labels if k.trainId != 255] + meta = Metadata().set(stuff_names=stuff_names, stuff_colors=stuff_colors) + + for d in dicts: + img = np.array(Image.open(PathManager.open(d["file_name"], "rb"))) + visualizer = Visualizer(img, metadata=meta) + vis = visualizer.draw_dataset_dict(d) + # cv2.imshow("a", vis.get_image()[:, :, ::-1]) + # cv2.waitKey() + fpath = os.path.join(dirname, os.path.basename(d["file_name"])) + vis.save(fpath) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/coco.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f6f099e778e34cf89d267e13424d4f69240b7878 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/coco.py @@ -0,0 +1,466 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import contextlib +import datetime +import io +import json +import logging +import numpy as np +import os +import pycocotools.mask as mask_util +from fvcore.common.file_io import PathManager, file_lock +from fvcore.common.timer import Timer +from PIL import Image + +from detectron2.structures import Boxes, BoxMode, PolygonMasks + +from .. import DatasetCatalog, MetadataCatalog + +""" +This file contains functions to parse COCO-format annotations into dicts in "Detectron2 format". +""" + + +logger = logging.getLogger(__name__) + +__all__ = ["load_coco_json", "load_sem_seg", "convert_to_coco_json"] + + +def load_coco_json(json_file, image_root, dataset_name=None, extra_annotation_keys=None): + """ + Load a json file with COCO's instances annotation format. + Currently supports instance detection, instance segmentation, + and person keypoints annotations. + + Args: + json_file (str): full path to the json file in COCO instances annotation format. + image_root (str or path-like): the directory where the images in this json file exists. + dataset_name (str): the name of the dataset (e.g., coco_2017_train). + If provided, this function will also put "thing_classes" into + the metadata associated with this dataset. 
+ extra_annotation_keys (list[str]): list of per-annotation keys that should also be + loaded into the dataset dict (besides "iscrowd", "bbox", "keypoints", + "category_id", "segmentation"). The values for these keys will be returned as-is. + For example, the densepose annotations are loaded in this way. + + Returns: + list[dict]: a list of dicts in Detectron2 standard dataset dicts format. (See + `Using Custom Datasets `_ ) + + Notes: + 1. This function does not read the image files. + The results do not have the "image" field. + """ + from pycocotools.coco import COCO + + timer = Timer() + json_file = PathManager.get_local_path(json_file) + with contextlib.redirect_stdout(io.StringIO()): + coco_api = COCO(json_file) + if timer.seconds() > 1: + logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds())) + + id_map = None + if dataset_name is not None: + meta = MetadataCatalog.get(dataset_name) + cat_ids = sorted(coco_api.getCatIds()) + cats = coco_api.loadCats(cat_ids) + # The categories in a custom json file may not be sorted. + thing_classes = [c["name"] for c in sorted(cats, key=lambda x: x["id"])] + meta.thing_classes = thing_classes + + # In COCO, certain category ids are artificially removed, + # and by convention they are always ignored. + # We deal with COCO's id issue and translate + # the category ids to contiguous ids in [0, 80). + + # It works by looking at the "categories" field in the json, therefore + # if users' own json also have incontiguous ids, we'll + # apply this mapping as well but print a warning. + if not (min(cat_ids) == 1 and max(cat_ids) == len(cat_ids)): + if "coco" not in dataset_name: + logger.warning( + """ +Category ids in annotations are not in [1, #categories]! We'll apply a mapping for you. +""" + ) + id_map = {v: i for i, v in enumerate(cat_ids)} + meta.thing_dataset_id_to_contiguous_id = id_map + + # sort indices for reproducible results + img_ids = sorted(coco_api.imgs.keys()) + # imgs is a list of dicts, each looks something like: + # {'license': 4, + # 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg', + # 'file_name': 'COCO_val2014_000000001268.jpg', + # 'height': 427, + # 'width': 640, + # 'date_captured': '2013-11-17 05:57:24', + # 'id': 1268} + imgs = coco_api.loadImgs(img_ids) + # anns is a list[list[dict]], where each dict is an annotation + # record for an object. The inner list enumerates the objects in an image + # and the outer list enumerates over images. Example of anns[0]: + # [{'segmentation': [[192.81, + # 247.09, + # ... + # 219.03, + # 249.06]], + # 'area': 1035.749, + # 'iscrowd': 0, + # 'image_id': 1268, + # 'bbox': [192.81, 224.8, 74.73, 33.43], + # 'category_id': 16, + # 'id': 42986}, + # ...] + anns = [coco_api.imgToAnns[img_id] for img_id in img_ids] + + if "minival" not in json_file: + # The popular valminusminival & minival annotations for COCO2014 contain this bug. + # However the ratio of buggy annotations there is tiny and does not affect accuracy. + # Therefore we explicitly white-list them. 
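+        # The comprehension below flattens the per-image annotation lists so that a duplicate
+        # annotation id anywhere in the json is caught by the set-vs-list length comparison.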
+ ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image] + assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique!".format( + json_file + ) + + imgs_anns = list(zip(imgs, anns)) + + logger.info("Loaded {} images in COCO format from {}".format(len(imgs_anns), json_file)) + + dataset_dicts = [] + + ann_keys = ["iscrowd", "bbox", "keypoints", "category_id"] + (extra_annotation_keys or []) + + num_instances_without_valid_segmentation = 0 + + for (img_dict, anno_dict_list) in imgs_anns: + record = {} + record["file_name"] = os.path.join(image_root, img_dict["file_name"]) + record["height"] = img_dict["height"] + record["width"] = img_dict["width"] + image_id = record["image_id"] = img_dict["id"] + + objs = [] + for anno in anno_dict_list: + # Check that the image_id in this annotation is the same as + # the image_id we're looking at. + # This fails only when the data parsing logic or the annotation file is buggy. + + # The original COCO valminusminival2014 & minival2014 annotation files + # actually contains bugs that, together with certain ways of using COCO API, + # can trigger this assertion. + assert anno["image_id"] == image_id + + assert anno.get("ignore", 0) == 0, '"ignore" in COCO json file is not supported.' + + obj = {key: anno[key] for key in ann_keys if key in anno} + + segm = anno.get("segmentation", None) + if segm: # either list[list[float]] or dict(RLE) + if not isinstance(segm, dict): + # filter out invalid polygons (< 3 points) + segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6] + if len(segm) == 0: + num_instances_without_valid_segmentation += 1 + continue # ignore this instance + obj["segmentation"] = segm + + keypts = anno.get("keypoints", None) + if keypts: # list[int] + for idx, v in enumerate(keypts): + if idx % 3 != 2: + # COCO's segmentation coordinates are floating points in [0, H or W], + # but keypoint coordinates are integers in [0, H-1 or W-1] + # Therefore we assume the coordinates are "pixel indices" and + # add 0.5 to convert to floating point coordinates. + keypts[idx] = v + 0.5 + obj["keypoints"] = keypts + + obj["bbox_mode"] = BoxMode.XYWH_ABS + if id_map: + obj["category_id"] = id_map[obj["category_id"]] + objs.append(obj) + record["annotations"] = objs + dataset_dicts.append(record) + + if num_instances_without_valid_segmentation > 0: + logger.warning( + "Filtered out {} instances without valid segmentation. " + "There might be issues in your dataset generation process.".format( + num_instances_without_valid_segmentation + ) + ) + return dataset_dicts + + +def load_sem_seg(gt_root, image_root, gt_ext="png", image_ext="jpg"): + """ + Load semantic segmentation data. All files under "gt_root" with "gt_ext" extension are + treated as ground truth annotations and all files under "image_root" with "image_ext" extension + as input images. Ground truth and input images are matched using file paths relative to + "gt_root" and "image_root" respectively without taking into account file extensions. + This works for COCO as well as some other data. + + Args: + gt_root (str): full path to ground truth semantic segmentation files. Semantic segmentation + annotations are stored as images with integer values in pixels that represent + corresponding semantic labels. + image_root (str): the directory where the input images are. + gt_ext (str): file extension for ground truth annotations. + image_ext (str): file extension for input images. 
+ + Returns: + list[dict]: + a list of dicts in detectron2 standard format without instance-level + annotation. + + Notes: + 1. This function does not read the image and ground truth files. + The results do not have the "image" and "sem_seg" fields. + """ + + # We match input images with ground truth based on their relative filepaths (without file + # extensions) starting from 'image_root' and 'gt_root' respectively. + def file2id(folder_path, file_path): + # extract relative path starting from `folder_path` + image_id = os.path.normpath(os.path.relpath(file_path, start=folder_path)) + # remove file extension + image_id = os.path.splitext(image_id)[0] + return image_id + + input_files = sorted( + (os.path.join(image_root, f) for f in PathManager.ls(image_root) if f.endswith(image_ext)), + key=lambda file_path: file2id(image_root, file_path), + ) + gt_files = sorted( + (os.path.join(gt_root, f) for f in PathManager.ls(gt_root) if f.endswith(gt_ext)), + key=lambda file_path: file2id(gt_root, file_path), + ) + + assert len(gt_files) > 0, "No annotations found in {}.".format(gt_root) + + # Use the intersection, so that val2017_100 annotations can run smoothly with val2017 images + if len(input_files) != len(gt_files): + logger.warn( + "Directories {} and {} have {} and {} files, respectively.".format( + image_root, gt_root, len(input_files), len(gt_files) + ) + ) + input_basenames = [os.path.basename(f)[: -len(image_ext)] for f in input_files] + gt_basenames = [os.path.basename(f)[: -len(gt_ext)] for f in gt_files] + intersect = list(set(input_basenames) & set(gt_basenames)) + # sort, otherwise each worker may obtain a list[dict] in different order + intersect = sorted(intersect) + logger.warn("Will use their intersection of {} files.".format(len(intersect))) + input_files = [os.path.join(image_root, f + image_ext) for f in intersect] + gt_files = [os.path.join(gt_root, f + gt_ext) for f in intersect] + + logger.info( + "Loaded {} images with semantic segmentation from {}".format(len(input_files), image_root) + ) + + dataset_dicts = [] + for (img_path, gt_path) in zip(input_files, gt_files): + record = {} + record["file_name"] = img_path + record["sem_seg_file_name"] = gt_path + dataset_dicts.append(record) + + return dataset_dicts + + +def convert_to_coco_dict(dataset_name): + """ + Convert an instance detection/segmentation or keypoint detection dataset + in detectron2's standard format into COCO json format. + + Generic dataset description can be found here: + https://detectron2.readthedocs.io/tutorials/datasets.html#register-a-dataset + + COCO data format description can be found here: + http://cocodataset.org/#format-data + + Args: + dataset_name (str): + name of the source dataset + Must be registered in DatasetCatalog and in detectron2's standard format.
+ Must have corresponding metadata "thing_classes" + Returns: + coco_dict: serializable dict in COCO json format + """ + + dataset_dicts = DatasetCatalog.get(dataset_name) + metadata = MetadataCatalog.get(dataset_name) + + # unmap the category mapping ids for COCO + if hasattr(metadata, "thing_dataset_id_to_contiguous_id"): + reverse_id_mapping = {v: k for k, v in metadata.thing_dataset_id_to_contiguous_id.items()} + reverse_id_mapper = lambda contiguous_id: reverse_id_mapping[contiguous_id] # noqa + else: + reverse_id_mapper = lambda contiguous_id: contiguous_id # noqa + + categories = [ + {"id": reverse_id_mapper(id), "name": name} + for id, name in enumerate(metadata.thing_classes) + ] + + logger.info("Converting dataset dicts into COCO format") + coco_images = [] + coco_annotations = [] + + for image_id, image_dict in enumerate(dataset_dicts): + coco_image = { + "id": image_dict.get("image_id", image_id), + "width": image_dict["width"], + "height": image_dict["height"], + "file_name": image_dict["file_name"], + } + coco_images.append(coco_image) + + anns_per_image = image_dict["annotations"] + for annotation in anns_per_image: + # create a new dict with only COCO fields + coco_annotation = {} + + # COCO requirement: XYWH box format + bbox = annotation["bbox"] + bbox_mode = annotation["bbox_mode"] + bbox = BoxMode.convert(bbox, bbox_mode, BoxMode.XYWH_ABS) + + # COCO requirement: instance area + if "segmentation" in annotation: + # Computing areas for instances by counting the pixels + segmentation = annotation["segmentation"] + # TODO: check segmentation type: RLE, BinaryMask or Polygon + if isinstance(segmentation, list): + polygons = PolygonMasks([segmentation]) + area = polygons.area()[0].item() + elif isinstance(segmentation, dict): # RLE + area = mask_util.area(segmentation).item() + else: + raise TypeError(f"Unknown segmentation type {type(segmentation)}!") + else: + # Computing areas using bounding boxes + bbox_xy = BoxMode.convert(bbox, BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) + area = Boxes([bbox_xy]).area()[0].item() + + if "keypoints" in annotation: + keypoints = annotation["keypoints"] # list[int] + for idx, v in enumerate(keypoints): + if idx % 3 != 2: + # COCO's segmentation coordinates are floating points in [0, H or W], + # but keypoint coordinates are integers in [0, H-1 or W-1] + # For COCO format consistency we substract 0.5 + # https://github.com/facebookresearch/detectron2/pull/175#issuecomment-551202163 + keypoints[idx] = v - 0.5 + if "num_keypoints" in annotation: + num_keypoints = annotation["num_keypoints"] + else: + num_keypoints = sum(kp > 0 for kp in keypoints[2::3]) + + # COCO requirement: + # linking annotations to images + # "id" field must start with 1 + coco_annotation["id"] = len(coco_annotations) + 1 + coco_annotation["image_id"] = coco_image["id"] + coco_annotation["bbox"] = [round(float(x), 3) for x in bbox] + coco_annotation["area"] = float(area) + coco_annotation["iscrowd"] = annotation.get("iscrowd", 0) + coco_annotation["category_id"] = reverse_id_mapper(annotation["category_id"]) + + # Add optional fields + if "keypoints" in annotation: + coco_annotation["keypoints"] = keypoints + coco_annotation["num_keypoints"] = num_keypoints + + if "segmentation" in annotation: + coco_annotation["segmentation"] = annotation["segmentation"] + if isinstance(coco_annotation["segmentation"], dict): # RLE + coco_annotation["segmentation"]["counts"] = coco_annotation["segmentation"][ + "counts" + ].decode("ascii") + + coco_annotations.append(coco_annotation) + + 
logger.info( + "Conversion finished, " + f"#images: {len(coco_images)}, #annotations: {len(coco_annotations)}" + ) + + info = { + "date_created": str(datetime.datetime.now()), + "description": "Automatically generated COCO json file for Detectron2.", + } + coco_dict = { + "info": info, + "images": coco_images, + "annotations": coco_annotations, + "categories": categories, + "licenses": None, + } + return coco_dict + + +def convert_to_coco_json(dataset_name, output_file, allow_cached=True): + """ + Converts dataset into COCO format and saves it to a json file. + dataset_name must be registered in DatasetCatalog and in detectron2's standard format. + + Args: + dataset_name: + reference from the config file to the catalogs + must be registered in DatasetCatalog and in detectron2's standard format + output_file: path of json file that will be saved to + allow_cached: if json file is already present then skip conversion + """ + + # TODO: The dataset or the conversion script *may* change, + # a checksum would be useful for validating the cached data + + PathManager.mkdirs(os.path.dirname(output_file)) + with file_lock(output_file): + if PathManager.exists(output_file) and allow_cached: + logger.warning( + f"Using previously cached COCO format annotations at '{output_file}'. " + "You need to clear the cache file if your dataset has been modified." + ) + else: + logger.info(f"Converting annotations of dataset '{dataset_name}' to COCO format ...)") + coco_dict = convert_to_coco_dict(dataset_name) + + logger.info(f"Caching COCO format annotations at '{output_file}' ...") + with PathManager.open(output_file, "w") as f: + json.dump(coco_dict, f) + + +if __name__ == "__main__": + """ + Test the COCO json dataset loader. + + Usage: + python -m detectron2.data.data.coco \ + path/to/json path/to/image_root dataset_name + + "dataset_name" can be "coco_2014_minival_100", or other + pre-registered ones + """ + from detectron2.utils.logger import setup_logger + from detectron2.utils.visualizer import Visualizer + import detectron2.data.datasets # noqa # add pre-defined metadata + import sys + + logger = setup_logger(name=__name__) + assert sys.argv[3] in DatasetCatalog.list() + meta = MetadataCatalog.get(sys.argv[3]) + + dicts = load_coco_json(sys.argv[1], sys.argv[2], sys.argv[3]) + logger.info("Done loading {} samples.".format(len(dicts))) + + dirname = "coco-data-vis" + os.makedirs(dirname, exist_ok=True) + for d in dicts: + img = np.array(Image.open(d["file_name"])) + visualizer = Visualizer(img, metadata=meta) + vis = visualizer.draw_dataset_dict(d) + fpath = os.path.join(dirname, os.path.basename(d["file_name"])) + vis.save(fpath) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/lvis.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/lvis.py new file mode 100644 index 0000000000000000000000000000000000000000..7b95be350a775af78aa6412f560a29e825ba61a1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/lvis.py @@ -0,0 +1,209 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import os +from fvcore.common.file_io import PathManager +from fvcore.common.timer import Timer + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.structures import BoxMode + +from .builtin_meta import _get_coco_instances_meta +from .lvis_v0_5_categories import LVIS_CATEGORIES + +""" +This file contains functions to parse LVIS-format annotations into dicts in the +"Detectron2 format". +""" + +logger = logging.getLogger(__name__) + +__all__ = ["load_lvis_json", "register_lvis_instances", "get_lvis_instances_meta"] + + +def register_lvis_instances(name, metadata, json_file, image_root): + """ + Register a dataset in LVIS's json annotation format for instance detection and segmentation. + + Args: + name (str): a name that identifies the dataset, e.g. "lvis_v0.5_train". + metadata (dict): extra metadata associated with this dataset. It can be an empty dict. + json_file (str): path to the json instance annotation file. + image_root (str or path-like): directory which contains all the images. + """ + DatasetCatalog.register(name, lambda: load_lvis_json(json_file, image_root, name)) + MetadataCatalog.get(name).set( + json_file=json_file, image_root=image_root, evaluator_type="lvis", **metadata + ) + + +def load_lvis_json(json_file, image_root, dataset_name=None): + """ + Load a json file in LVIS's annotation format. + + Args: + json_file (str): full path to the LVIS json annotation file. + image_root (str): the directory where the images in this json file exists. + dataset_name (str): the name of the dataset (e.g., "lvis_v0.5_train"). + If provided, this function will put "thing_classes" into the metadata + associated with this dataset. + + Returns: + list[dict]: a list of dicts in Detectron2 standard format. (See + `Using Custom Datasets `_ ) + + Notes: + 1. This function does not read the image files. + The results do not have the "image" field. + """ + from lvis import LVIS + + json_file = PathManager.get_local_path(json_file) + + timer = Timer() + lvis_api = LVIS(json_file) + if timer.seconds() > 1: + logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds())) + + if dataset_name is not None: + meta = get_lvis_instances_meta(dataset_name) + MetadataCatalog.get(dataset_name).set(**meta) + + # sort indices for reproducible results + img_ids = sorted(lvis_api.imgs.keys()) + # imgs is a list of dicts, each looks something like: + # {'license': 4, + # 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg', + # 'file_name': 'COCO_val2014_000000001268.jpg', + # 'height': 427, + # 'width': 640, + # 'date_captured': '2013-11-17 05:57:24', + # 'id': 1268} + imgs = lvis_api.load_imgs(img_ids) + # anns is a list[list[dict]], where each dict is an annotation + # record for an object. The inner list enumerates the objects in an image + # and the outer list enumerates over images. Example of anns[0]: + # [{'segmentation': [[192.81, + # 247.09, + # ... + # 219.03, + # 249.06]], + # 'area': 1035.749, + # 'image_id': 1268, + # 'bbox': [192.81, 224.8, 74.73, 33.43], + # 'category_id': 16, + # 'id': 42986}, + # ...] 
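+    # `img_ann_map` maps an image id to the list of annotation dicts for that image, so the
+    # list built below stays index-aligned with `imgs` loaded from the same `img_ids` order.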
+ anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids] + + # Sanity check that each annotation has a unique id + ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image] + assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique".format( + json_file + ) + + imgs_anns = list(zip(imgs, anns)) + + logger.info("Loaded {} images in the LVIS format from {}".format(len(imgs_anns), json_file)) + + dataset_dicts = [] + + for (img_dict, anno_dict_list) in imgs_anns: + record = {} + file_name = img_dict["file_name"] + if img_dict["file_name"].startswith("COCO"): + # Convert from the COCO 2014 file naming convention of + # COCO_[train/val/test]2014_000000000000.jpg to the 2017 naming convention of + # 000000000000.jpg (LVIS v1 will fix this naming issue) + file_name = file_name[-16:] + record["file_name"] = os.path.join(image_root, file_name) + record["height"] = img_dict["height"] + record["width"] = img_dict["width"] + record["not_exhaustive_category_ids"] = img_dict.get("not_exhaustive_category_ids", []) + record["neg_category_ids"] = img_dict.get("neg_category_ids", []) + image_id = record["image_id"] = img_dict["id"] + + objs = [] + for anno in anno_dict_list: + # Check that the image_id in this annotation is the same as + # the image_id we're looking at. + # This fails only when the data parsing logic or the annotation file is buggy. + assert anno["image_id"] == image_id + obj = {"bbox": anno["bbox"], "bbox_mode": BoxMode.XYWH_ABS} + obj["category_id"] = anno["category_id"] - 1 # Convert 1-indexed to 0-indexed + segm = anno["segmentation"] # list[list[float]] + # filter out invalid polygons (< 3 points) + valid_segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6] + assert len(segm) == len( + valid_segm + ), "Annotation contains an invalid polygon with < 3 points" + assert len(segm) > 0 + obj["segmentation"] = segm + objs.append(obj) + record["annotations"] = objs + dataset_dicts.append(record) + + return dataset_dicts + + +def get_lvis_instances_meta(dataset_name): + """ + Load LVIS metadata. + + Args: + dataset_name (str): LVIS dataset name without the split name (e.g., "lvis_v0.5"). + + Returns: + dict: LVIS metadata with keys: thing_classes + """ + if "cocofied" in dataset_name: + return _get_coco_instances_meta() + if "v0.5" in dataset_name: + return _get_lvis_instances_meta_v0_5() + # There will be a v1 in the future + # elif dataset_name == "lvis_v1": + # return get_lvis_instances_meta_v1() + raise ValueError("No built-in metadata for dataset {}".format(dataset_name)) + + +def _get_lvis_instances_meta_v0_5(): + assert len(LVIS_CATEGORIES) == 1230 + cat_ids = [k["id"] for k in LVIS_CATEGORIES] + assert min(cat_ids) == 1 and max(cat_ids) == len( + cat_ids + ), "Category ids are not in [1, #categories], as expected" + # Ensure that the category list is sorted by id + lvis_categories = sorted(LVIS_CATEGORIES, key=lambda x: x["id"]) + thing_classes = [k["synonyms"][0] for k in lvis_categories] + meta = {"thing_classes": thing_classes} + return meta + + +if __name__ == "__main__": + """ + Test the LVIS json dataset loader.
+ + Usage: + python -m detectron2.data.data.lvis \ + path/to/json path/to/image_root dataset_name vis_limit + """ + import sys + import numpy as np + from detectron2.utils.logger import setup_logger + from PIL import Image + import detectron2.data.datasets # noqa # add pre-defined metadata + from detectron2.utils.visualizer import Visualizer + + logger = setup_logger(name=__name__) + meta = MetadataCatalog.get(sys.argv[3]) + + dicts = load_lvis_json(sys.argv[1], sys.argv[2], sys.argv[3]) + logger.info("Done loading {} samples.".format(len(dicts))) + + dirname = "lvis-data-vis" + os.makedirs(dirname, exist_ok=True) + for d in dicts[: int(sys.argv[4])]: + img = np.array(Image.open(d["file_name"])) + visualizer = Visualizer(img, metadata=meta) + vis = visualizer.draw_dataset_dict(d) + fpath = os.path.join(dirname, os.path.basename(d["file_name"])) + vis.save(fpath) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/lvis_v0_5_categories.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/lvis_v0_5_categories.py new file mode 100644 index 0000000000000000000000000000000000000000..8205e605f85dab3674c6f1600d7675eef86b160f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/lvis_v0_5_categories.py @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# Autogen with +# with open("lvis_v0.5_val.json", "r") as f: +# a = json.load(f) +# c = a["categories"] +# for x in c: +# del x["image_count"] +# del x["instance_count"] +# LVIS_CATEGORIES = repr(c) + " # noqa" + +# fmt: off +LVIS_CATEGORIES = [{'frequency': 'r', 'id': 1, 'synset': 'acorn.n.01', 'synonyms': ['acorn'], 'def': 'nut from an oak tree', 'name': 'acorn'}, {'frequency': 'c', 'id': 2, 'synset': 'aerosol.n.02', 'synonyms': ['aerosol_can', 'spray_can'], 'def': 'a dispenser that holds a substance under pressure', 'name': 'aerosol_can'}, {'frequency': 'f', 'id': 3, 'synset': 'air_conditioner.n.01', 'synonyms': ['air_conditioner'], 'def': 'a machine that keeps air cool and dry', 'name': 'air_conditioner'}, {'frequency': 'f', 'id': 4, 'synset': 'airplane.n.01', 'synonyms': ['airplane', 'aeroplane'], 'def': 'an aircraft that has a fixed wing and is powered by propellers or jets', 'name': 'airplane'}, {'frequency': 'c', 'id': 5, 'synset': 'alarm_clock.n.01', 'synonyms': ['alarm_clock'], 'def': 'a clock that wakes a sleeper at some preset time', 'name': 'alarm_clock'}, {'frequency': 'c', 'id': 6, 'synset': 'alcohol.n.01', 'synonyms': ['alcohol', 'alcoholic_beverage'], 'def': 'a liquor or brew containing alcohol as the active agent', 'name': 'alcohol'}, {'frequency': 'r', 'id': 7, 'synset': 'alligator.n.02', 'synonyms': ['alligator', 'gator'], 'def': 'amphibious reptiles related to crocodiles but with shorter broader snouts', 'name': 'alligator'}, {'frequency': 'c', 'id': 8, 'synset': 'almond.n.02', 'synonyms': ['almond'], 'def': 'oval-shaped edible seed of the almond tree', 'name': 'almond'}, {'frequency': 'c', 'id': 9, 'synset': 'ambulance.n.01', 'synonyms': ['ambulance'], 'def': 'a vehicle that takes people to and from hospitals', 'name': 'ambulance'}, {'frequency': 'r', 'id': 10, 'synset': 'amplifier.n.01', 'synonyms': ['amplifier'], 'def': 'electronic equipment that increases strength of signals', 'name': 'amplifier'}, {'frequency': 'c', 'id': 11, 'synset': 'anklet.n.03', 'synonyms': ['anklet', 'ankle_bracelet'], 'def': 'an ornament worn around the ankle', 
'name': 'anklet'}, {'frequency': 'f', 'id': 12, 'synset': 'antenna.n.01', 'synonyms': ['antenna', 'aerial', 'transmitting_aerial'], 'def': 'an electrical device that sends or receives radio or television signals', 'name': 'antenna'}, {'frequency': 'f', 'id': 13, 'synset': 'apple.n.01', 'synonyms': ['apple'], 'def': 'fruit with red or yellow or green skin and sweet to tart crisp whitish flesh', 'name': 'apple'}, {'frequency': 'r', 'id': 14, 'synset': 'apple_juice.n.01', 'synonyms': ['apple_juice'], 'def': 'the juice of apples', 'name': 'apple_juice'}, {'frequency': 'r', 'id': 15, 'synset': 'applesauce.n.01', 'synonyms': ['applesauce'], 'def': 'puree of stewed apples usually sweetened and spiced', 'name': 'applesauce'}, {'frequency': 'r', 'id': 16, 'synset': 'apricot.n.02', 'synonyms': ['apricot'], 'def': 'downy yellow to rosy-colored fruit resembling a small peach', 'name': 'apricot'}, {'frequency': 'f', 'id': 17, 'synset': 'apron.n.01', 'synonyms': ['apron'], 'def': 'a garment of cloth that is tied about the waist and worn to protect clothing', 'name': 'apron'}, {'frequency': 'c', 'id': 18, 'synset': 'aquarium.n.01', 'synonyms': ['aquarium', 'fish_tank'], 'def': 'a tank/pool/bowl filled with water for keeping live fish and underwater animals', 'name': 'aquarium'}, {'frequency': 'c', 'id': 19, 'synset': 'armband.n.02', 'synonyms': ['armband'], 'def': 'a band worn around the upper arm', 'name': 'armband'}, {'frequency': 'f', 'id': 20, 'synset': 'armchair.n.01', 'synonyms': ['armchair'], 'def': 'chair with a support on each side for arms', 'name': 'armchair'}, {'frequency': 'r', 'id': 21, 'synset': 'armoire.n.01', 'synonyms': ['armoire'], 'def': 'a large wardrobe or cabinet', 'name': 'armoire'}, {'frequency': 'r', 'id': 22, 'synset': 'armor.n.01', 'synonyms': ['armor', 'armour'], 'def': 'protective covering made of metal and used in combat', 'name': 'armor'}, {'frequency': 'c', 'id': 23, 'synset': 'artichoke.n.02', 'synonyms': ['artichoke'], 'def': 'a thistlelike flower head with edible fleshy leaves and heart', 'name': 'artichoke'}, {'frequency': 'f', 'id': 24, 'synset': 'ashcan.n.01', 'synonyms': ['trash_can', 'garbage_can', 'wastebin', 'dustbin', 'trash_barrel', 'trash_bin'], 'def': 'a bin that holds rubbish until it is collected', 'name': 'trash_can'}, {'frequency': 'c', 'id': 25, 'synset': 'ashtray.n.01', 'synonyms': ['ashtray'], 'def': "a receptacle for the ash from smokers' cigars or cigarettes", 'name': 'ashtray'}, {'frequency': 'c', 'id': 26, 'synset': 'asparagus.n.02', 'synonyms': ['asparagus'], 'def': 'edible young shoots of the asparagus plant', 'name': 'asparagus'}, {'frequency': 'c', 'id': 27, 'synset': 'atomizer.n.01', 'synonyms': ['atomizer', 'atomiser', 'spray', 'sprayer', 'nebulizer', 'nebuliser'], 'def': 'a dispenser that turns a liquid (such as perfume) into a fine mist', 'name': 'atomizer'}, {'frequency': 'c', 'id': 28, 'synset': 'avocado.n.01', 'synonyms': ['avocado'], 'def': 'a pear-shaped fruit with green or blackish skin and rich yellowish pulp enclosing a single large seed', 'name': 'avocado'}, {'frequency': 'c', 'id': 29, 'synset': 'award.n.02', 'synonyms': ['award', 'accolade'], 'def': 'a tangible symbol signifying approval or distinction', 'name': 'award'}, {'frequency': 'f', 'id': 30, 'synset': 'awning.n.01', 'synonyms': ['awning'], 'def': 'a canopy made of canvas to shelter people or things from rain or sun', 'name': 'awning'}, {'frequency': 'r', 'id': 31, 'synset': 'ax.n.01', 'synonyms': ['ax', 'axe'], 'def': 'an edge tool with a heavy bladed head mounted 
across a handle', 'name': 'ax'}, {'frequency': 'f', 'id': 32, 'synset': 'baby_buggy.n.01', 'synonyms': ['baby_buggy', 'baby_carriage', 'perambulator', 'pram', 'stroller'], 'def': 'a small vehicle with four wheels in which a baby or child is pushed around', 'name': 'baby_buggy'}, {'frequency': 'c', 'id': 33, 'synset': 'backboard.n.01', 'synonyms': ['basketball_backboard'], 'def': 'a raised vertical board with basket attached; used to play basketball', 'name': 'basketball_backboard'}, {'frequency': 'f', 'id': 34, 'synset': 'backpack.n.01', 'synonyms': ['backpack', 'knapsack', 'packsack', 'rucksack', 'haversack'], 'def': 'a bag carried by a strap on your back or shoulder', 'name': 'backpack'}, {'frequency': 'f', 'id': 35, 'synset': 'bag.n.04', 'synonyms': ['handbag', 'purse', 'pocketbook'], 'def': 'a container used for carrying money and small personal items or accessories', 'name': 'handbag'}, {'frequency': 'f', 'id': 36, 'synset': 'bag.n.06', 'synonyms': ['suitcase', 'baggage', 'luggage'], 'def': 'cases used to carry belongings when traveling', 'name': 'suitcase'}, {'frequency': 'c', 'id': 37, 'synset': 'bagel.n.01', 'synonyms': ['bagel', 'beigel'], 'def': 'glazed yeast-raised doughnut-shaped roll with hard crust', 'name': 'bagel'}, {'frequency': 'r', 'id': 38, 'synset': 'bagpipe.n.01', 'synonyms': ['bagpipe'], 'def': 'a tubular wind instrument; the player blows air into a bag and squeezes it out', 'name': 'bagpipe'}, {'frequency': 'r', 'id': 39, 'synset': 'baguet.n.01', 'synonyms': ['baguet', 'baguette'], 'def': 'narrow French stick loaf', 'name': 'baguet'}, {'frequency': 'r', 'id': 40, 'synset': 'bait.n.02', 'synonyms': ['bait', 'lure'], 'def': 'something used to lure fish or other animals into danger so they can be trapped or killed', 'name': 'bait'}, {'frequency': 'f', 'id': 41, 'synset': 'ball.n.06', 'synonyms': ['ball'], 'def': 'a spherical object used as a plaything', 'name': 'ball'}, {'frequency': 'r', 'id': 42, 'synset': 'ballet_skirt.n.01', 'synonyms': ['ballet_skirt', 'tutu'], 'def': 'very short skirt worn by ballerinas', 'name': 'ballet_skirt'}, {'frequency': 'f', 'id': 43, 'synset': 'balloon.n.01', 'synonyms': ['balloon'], 'def': 'large tough nonrigid bag filled with gas or heated air', 'name': 'balloon'}, {'frequency': 'c', 'id': 44, 'synset': 'bamboo.n.02', 'synonyms': ['bamboo'], 'def': 'woody tropical grass having hollow woody stems', 'name': 'bamboo'}, {'frequency': 'f', 'id': 45, 'synset': 'banana.n.02', 'synonyms': ['banana'], 'def': 'elongated crescent-shaped yellow fruit with soft sweet flesh', 'name': 'banana'}, {'frequency': 'r', 'id': 46, 'synset': 'band_aid.n.01', 'synonyms': ['Band_Aid'], 'def': 'trade name for an adhesive bandage to cover small cuts or blisters', 'name': 'Band_Aid'}, {'frequency': 'c', 'id': 47, 'synset': 'bandage.n.01', 'synonyms': ['bandage'], 'def': 'a piece of soft material that covers and protects an injured part of the body', 'name': 'bandage'}, {'frequency': 'c', 'id': 48, 'synset': 'bandanna.n.01', 'synonyms': ['bandanna', 'bandana'], 'def': 'large and brightly colored handkerchief; often used as a neckerchief', 'name': 'bandanna'}, {'frequency': 'r', 'id': 49, 'synset': 'banjo.n.01', 'synonyms': ['banjo'], 'def': 'a stringed instrument of the guitar family with a long neck and circular body', 'name': 'banjo'}, {'frequency': 'f', 'id': 50, 'synset': 'banner.n.01', 'synonyms': ['banner', 'streamer'], 'def': 'long strip of cloth or paper used for decoration or advertising', 'name': 'banner'}, {'frequency': 'r', 'id': 51, 'synset': 
'barbell.n.01', 'synonyms': ['barbell'], 'def': 'a bar to which heavy discs are attached at each end; used in weightlifting', 'name': 'barbell'}, {'frequency': 'r', 'id': 52, 'synset': 'barge.n.01', 'synonyms': ['barge'], 'def': 'a flatbottom boat for carrying heavy loads (especially on canals)', 'name': 'barge'}, {'frequency': 'f', 'id': 53, 'synset': 'barrel.n.02', 'synonyms': ['barrel', 'cask'], 'def': 'a cylindrical container that holds liquids', 'name': 'barrel'}, {'frequency': 'c', 'id': 54, 'synset': 'barrette.n.01', 'synonyms': ['barrette'], 'def': "a pin for holding women's hair in place", 'name': 'barrette'}, {'frequency': 'c', 'id': 55, 'synset': 'barrow.n.03', 'synonyms': ['barrow', 'garden_cart', 'lawn_cart', 'wheelbarrow'], 'def': 'a cart for carrying small loads; has handles and one or more wheels', 'name': 'barrow'}, {'frequency': 'f', 'id': 56, 'synset': 'base.n.03', 'synonyms': ['baseball_base'], 'def': 'a place that the runner must touch before scoring', 'name': 'baseball_base'}, {'frequency': 'f', 'id': 57, 'synset': 'baseball.n.02', 'synonyms': ['baseball'], 'def': 'a ball used in playing baseball', 'name': 'baseball'}, {'frequency': 'f', 'id': 58, 'synset': 'baseball_bat.n.01', 'synonyms': ['baseball_bat'], 'def': 'an implement used in baseball by the batter', 'name': 'baseball_bat'}, {'frequency': 'f', 'id': 59, 'synset': 'baseball_cap.n.01', 'synonyms': ['baseball_cap', 'jockey_cap', 'golf_cap'], 'def': 'a cap with a bill', 'name': 'baseball_cap'}, {'frequency': 'f', 'id': 60, 'synset': 'baseball_glove.n.01', 'synonyms': ['baseball_glove', 'baseball_mitt'], 'def': 'the handwear used by fielders in playing baseball', 'name': 'baseball_glove'}, {'frequency': 'f', 'id': 61, 'synset': 'basket.n.01', 'synonyms': ['basket', 'handbasket'], 'def': 'a container that is usually woven and has handles', 'name': 'basket'}, {'frequency': 'c', 'id': 62, 'synset': 'basket.n.03', 'synonyms': ['basketball_hoop'], 'def': 'metal hoop supporting a net through which players try to throw the basketball', 'name': 'basketball_hoop'}, {'frequency': 'c', 'id': 63, 'synset': 'basketball.n.02', 'synonyms': ['basketball'], 'def': 'an inflated ball used in playing basketball', 'name': 'basketball'}, {'frequency': 'r', 'id': 64, 'synset': 'bass_horn.n.01', 'synonyms': ['bass_horn', 'sousaphone', 'tuba'], 'def': 'the lowest brass wind instrument', 'name': 'bass_horn'}, {'frequency': 'r', 'id': 65, 'synset': 'bat.n.01', 'synonyms': ['bat_(animal)'], 'def': 'nocturnal mouselike mammal with forelimbs modified to form membranous wings', 'name': 'bat_(animal)'}, {'frequency': 'f', 'id': 66, 'synset': 'bath_mat.n.01', 'synonyms': ['bath_mat'], 'def': 'a heavy towel or mat to stand on while drying yourself after a bath', 'name': 'bath_mat'}, {'frequency': 'f', 'id': 67, 'synset': 'bath_towel.n.01', 'synonyms': ['bath_towel'], 'def': 'a large towel; to dry yourself after a bath', 'name': 'bath_towel'}, {'frequency': 'c', 'id': 68, 'synset': 'bathrobe.n.01', 'synonyms': ['bathrobe'], 'def': 'a loose-fitting robe of towelling; worn after a bath or swim', 'name': 'bathrobe'}, {'frequency': 'f', 'id': 69, 'synset': 'bathtub.n.01', 'synonyms': ['bathtub', 'bathing_tub'], 'def': 'a large open container that you fill with water and use to wash the body', 'name': 'bathtub'}, {'frequency': 'r', 'id': 70, 'synset': 'batter.n.02', 'synonyms': ['batter_(food)'], 'def': 'a liquid or semiliquid mixture, as of flour, eggs, and milk, used in cooking', 'name': 'batter_(food)'}, {'frequency': 'c', 'id': 71, 'synset': 
'battery.n.02', 'synonyms': ['battery'], 'def': 'a portable device that produces electricity', 'name': 'battery'}, {'frequency': 'r', 'id': 72, 'synset': 'beach_ball.n.01', 'synonyms': ['beachball'], 'def': 'large and light ball; for play at the seaside', 'name': 'beachball'}, {'frequency': 'c', 'id': 73, 'synset': 'bead.n.01', 'synonyms': ['bead'], 'def': 'a small ball with a hole through the middle used for ornamentation, jewellery, etc.', 'name': 'bead'}, {'frequency': 'r', 'id': 74, 'synset': 'beaker.n.01', 'synonyms': ['beaker'], 'def': 'a flatbottomed jar made of glass or plastic; used for chemistry', 'name': 'beaker'}, {'frequency': 'c', 'id': 75, 'synset': 'bean_curd.n.01', 'synonyms': ['bean_curd', 'tofu'], 'def': 'cheeselike food made of curdled soybean milk', 'name': 'bean_curd'}, {'frequency': 'c', 'id': 76, 'synset': 'beanbag.n.01', 'synonyms': ['beanbag'], 'def': 'a bag filled with dried beans or similar items; used in games or to sit on', 'name': 'beanbag'}, {'frequency': 'f', 'id': 77, 'synset': 'beanie.n.01', 'synonyms': ['beanie', 'beany'], 'def': 'a small skullcap; formerly worn by schoolboys and college freshmen', 'name': 'beanie'}, {'frequency': 'f', 'id': 78, 'synset': 'bear.n.01', 'synonyms': ['bear'], 'def': 'large carnivorous or omnivorous mammals with shaggy coats and claws', 'name': 'bear'}, {'frequency': 'f', 'id': 79, 'synset': 'bed.n.01', 'synonyms': ['bed'], 'def': 'a piece of furniture that provides a place to sleep', 'name': 'bed'}, {'frequency': 'c', 'id': 80, 'synset': 'bedspread.n.01', 'synonyms': ['bedspread', 'bedcover', 'bed_covering', 'counterpane', 'spread'], 'def': 'decorative cover for a bed', 'name': 'bedspread'}, {'frequency': 'f', 'id': 81, 'synset': 'beef.n.01', 'synonyms': ['cow'], 'def': 'cattle that are reared for their meat', 'name': 'cow'}, {'frequency': 'c', 'id': 82, 'synset': 'beef.n.02', 'synonyms': ['beef_(food)', 'boeuf_(food)'], 'def': 'meat from an adult domestic bovine', 'name': 'beef_(food)'}, {'frequency': 'r', 'id': 83, 'synset': 'beeper.n.01', 'synonyms': ['beeper', 'pager'], 'def': 'an device that beeps when the person carrying it is being paged', 'name': 'beeper'}, {'frequency': 'f', 'id': 84, 'synset': 'beer_bottle.n.01', 'synonyms': ['beer_bottle'], 'def': 'a bottle that holds beer', 'name': 'beer_bottle'}, {'frequency': 'c', 'id': 85, 'synset': 'beer_can.n.01', 'synonyms': ['beer_can'], 'def': 'a can that holds beer', 'name': 'beer_can'}, {'frequency': 'r', 'id': 86, 'synset': 'beetle.n.01', 'synonyms': ['beetle'], 'def': 'insect with hard wing covers', 'name': 'beetle'}, {'frequency': 'f', 'id': 87, 'synset': 'bell.n.01', 'synonyms': ['bell'], 'def': 'a hollow device made of metal that makes a ringing sound when struck', 'name': 'bell'}, {'frequency': 'f', 'id': 88, 'synset': 'bell_pepper.n.02', 'synonyms': ['bell_pepper', 'capsicum'], 'def': 'large bell-shaped sweet pepper in green or red or yellow or orange or black varieties', 'name': 'bell_pepper'}, {'frequency': 'f', 'id': 89, 'synset': 'belt.n.02', 'synonyms': ['belt'], 'def': 'a band to tie or buckle around the body (usually at the waist)', 'name': 'belt'}, {'frequency': 'f', 'id': 90, 'synset': 'belt_buckle.n.01', 'synonyms': ['belt_buckle'], 'def': 'the buckle used to fasten a belt', 'name': 'belt_buckle'}, {'frequency': 'f', 'id': 91, 'synset': 'bench.n.01', 'synonyms': ['bench'], 'def': 'a long seat for more than one person', 'name': 'bench'}, {'frequency': 'c', 'id': 92, 'synset': 'beret.n.01', 'synonyms': ['beret'], 'def': 'a cap with no brim or bill; made 
of soft cloth', 'name': 'beret'}, {'frequency': 'c', 'id': 93, 'synset': 'bib.n.02', 'synonyms': ['bib'], 'def': 'a napkin tied under the chin of a child while eating', 'name': 'bib'}, {'frequency': 'r', 'id': 94, 'synset': 'bible.n.01', 'synonyms': ['Bible'], 'def': 'the sacred writings of the Christian religions', 'name': 'Bible'}, {'frequency': 'f', 'id': 95, 'synset': 'bicycle.n.01', 'synonyms': ['bicycle', 'bike_(bicycle)'], 'def': 'a wheeled vehicle that has two wheels and is moved by foot pedals', 'name': 'bicycle'}, {'frequency': 'f', 'id': 96, 'synset': 'bill.n.09', 'synonyms': ['visor', 'vizor'], 'def': 'a brim that projects to the front to shade the eyes', 'name': 'visor'}, {'frequency': 'c', 'id': 97, 'synset': 'binder.n.03', 'synonyms': ['binder', 'ring-binder'], 'def': 'holds loose papers or magazines', 'name': 'binder'}, {'frequency': 'c', 'id': 98, 'synset': 'binoculars.n.01', 'synonyms': ['binoculars', 'field_glasses', 'opera_glasses'], 'def': 'an optical instrument designed for simultaneous use by both eyes', 'name': 'binoculars'}, {'frequency': 'f', 'id': 99, 'synset': 'bird.n.01', 'synonyms': ['bird'], 'def': 'animal characterized by feathers and wings', 'name': 'bird'}, {'frequency': 'r', 'id': 100, 'synset': 'bird_feeder.n.01', 'synonyms': ['birdfeeder'], 'def': 'an outdoor device that supplies food for wild birds', 'name': 'birdfeeder'}, {'frequency': 'r', 'id': 101, 'synset': 'birdbath.n.01', 'synonyms': ['birdbath'], 'def': 'an ornamental basin (usually in a garden) for birds to bathe in', 'name': 'birdbath'}, {'frequency': 'c', 'id': 102, 'synset': 'birdcage.n.01', 'synonyms': ['birdcage'], 'def': 'a cage in which a bird can be kept', 'name': 'birdcage'}, {'frequency': 'c', 'id': 103, 'synset': 'birdhouse.n.01', 'synonyms': ['birdhouse'], 'def': 'a shelter for birds', 'name': 'birdhouse'}, {'frequency': 'f', 'id': 104, 'synset': 'birthday_cake.n.01', 'synonyms': ['birthday_cake'], 'def': 'decorated cake served at a birthday party', 'name': 'birthday_cake'}, {'frequency': 'r', 'id': 105, 'synset': 'birthday_card.n.01', 'synonyms': ['birthday_card'], 'def': 'a card expressing a birthday greeting', 'name': 'birthday_card'}, {'frequency': 'r', 'id': 106, 'synset': 'biscuit.n.01', 'synonyms': ['biscuit_(bread)'], 'def': 'small round bread leavened with baking-powder or soda', 'name': 'biscuit_(bread)'}, {'frequency': 'r', 'id': 107, 'synset': 'black_flag.n.01', 'synonyms': ['pirate_flag'], 'def': 'a flag usually bearing a white skull and crossbones on a black background', 'name': 'pirate_flag'}, {'frequency': 'c', 'id': 108, 'synset': 'black_sheep.n.02', 'synonyms': ['black_sheep'], 'def': 'sheep with a black coat', 'name': 'black_sheep'}, {'frequency': 'c', 'id': 109, 'synset': 'blackboard.n.01', 'synonyms': ['blackboard', 'chalkboard'], 'def': 'sheet of slate; for writing with chalk', 'name': 'blackboard'}, {'frequency': 'f', 'id': 110, 'synset': 'blanket.n.01', 'synonyms': ['blanket'], 'def': 'bedding that keeps a person warm in bed', 'name': 'blanket'}, {'frequency': 'c', 'id': 111, 'synset': 'blazer.n.01', 'synonyms': ['blazer', 'sport_jacket', 'sport_coat', 'sports_jacket', 'sports_coat'], 'def': 'lightweight jacket; often striped in the colors of a club or school', 'name': 'blazer'}, {'frequency': 'f', 'id': 112, 'synset': 'blender.n.01', 'synonyms': ['blender', 'liquidizer', 'liquidiser'], 'def': 'an electrically powered mixer that mix or chop or liquefy foods', 'name': 'blender'}, {'frequency': 'r', 'id': 113, 'synset': 'blimp.n.02', 'synonyms': ['blimp'], 
'def': 'a small nonrigid airship used for observation or as a barrage balloon', 'name': 'blimp'}, {'frequency': 'c', 'id': 114, 'synset': 'blinker.n.01', 'synonyms': ['blinker', 'flasher'], 'def': 'a light that flashes on and off; used as a signal or to send messages', 'name': 'blinker'}, {'frequency': 'c', 'id': 115, 'synset': 'blueberry.n.02', 'synonyms': ['blueberry'], 'def': 'sweet edible dark-blue berries of blueberry plants', 'name': 'blueberry'}, {'frequency': 'r', 'id': 116, 'synset': 'boar.n.02', 'synonyms': ['boar'], 'def': 'an uncastrated male hog', 'name': 'boar'}, {'frequency': 'r', 'id': 117, 'synset': 'board.n.09', 'synonyms': ['gameboard'], 'def': 'a flat portable surface (usually rectangular) designed for board games', 'name': 'gameboard'}, {'frequency': 'f', 'id': 118, 'synset': 'boat.n.01', 'synonyms': ['boat', 'ship_(boat)'], 'def': 'a vessel for travel on water', 'name': 'boat'}, {'frequency': 'c', 'id': 119, 'synset': 'bobbin.n.01', 'synonyms': ['bobbin', 'spool', 'reel'], 'def': 'a thing around which thread/tape/film or other flexible materials can be wound', 'name': 'bobbin'}, {'frequency': 'r', 'id': 120, 'synset': 'bobby_pin.n.01', 'synonyms': ['bobby_pin', 'hairgrip'], 'def': 'a flat wire hairpin used to hold bobbed hair in place', 'name': 'bobby_pin'}, {'frequency': 'c', 'id': 121, 'synset': 'boiled_egg.n.01', 'synonyms': ['boiled_egg', 'coddled_egg'], 'def': 'egg cooked briefly in the shell in gently boiling water', 'name': 'boiled_egg'}, {'frequency': 'r', 'id': 122, 'synset': 'bolo_tie.n.01', 'synonyms': ['bolo_tie', 'bolo', 'bola_tie', 'bola'], 'def': 'a cord fastened around the neck with an ornamental clasp and worn as a necktie', 'name': 'bolo_tie'}, {'frequency': 'c', 'id': 123, 'synset': 'bolt.n.03', 'synonyms': ['deadbolt'], 'def': 'the part of a lock that is engaged or withdrawn with a key', 'name': 'deadbolt'}, {'frequency': 'f', 'id': 124, 'synset': 'bolt.n.06', 'synonyms': ['bolt'], 'def': 'a screw that screws into a nut to form a fastener', 'name': 'bolt'}, {'frequency': 'r', 'id': 125, 'synset': 'bonnet.n.01', 'synonyms': ['bonnet'], 'def': 'a hat tied under the chin', 'name': 'bonnet'}, {'frequency': 'f', 'id': 126, 'synset': 'book.n.01', 'synonyms': ['book'], 'def': 'a written work or composition that has been published', 'name': 'book'}, {'frequency': 'r', 'id': 127, 'synset': 'book_bag.n.01', 'synonyms': ['book_bag'], 'def': 'a bag in which students carry their books', 'name': 'book_bag'}, {'frequency': 'c', 'id': 128, 'synset': 'bookcase.n.01', 'synonyms': ['bookcase'], 'def': 'a piece of furniture with shelves for storing books', 'name': 'bookcase'}, {'frequency': 'c', 'id': 129, 'synset': 'booklet.n.01', 'synonyms': ['booklet', 'brochure', 'leaflet', 'pamphlet'], 'def': 'a small book usually having a paper cover', 'name': 'booklet'}, {'frequency': 'r', 'id': 130, 'synset': 'bookmark.n.01', 'synonyms': ['bookmark', 'bookmarker'], 'def': 'a marker (a piece of paper or ribbon) placed between the pages of a book', 'name': 'bookmark'}, {'frequency': 'r', 'id': 131, 'synset': 'boom.n.04', 'synonyms': ['boom_microphone', 'microphone_boom'], 'def': 'a pole carrying an overhead microphone projected over a film or tv set', 'name': 'boom_microphone'}, {'frequency': 'f', 'id': 132, 'synset': 'boot.n.01', 'synonyms': ['boot'], 'def': 'footwear that covers the whole foot and lower leg', 'name': 'boot'}, {'frequency': 'f', 'id': 133, 'synset': 'bottle.n.01', 'synonyms': ['bottle'], 'def': 'a glass or plastic vessel used for storing drinks or other 
liquids', 'name': 'bottle'}, {'frequency': 'c', 'id': 134, 'synset': 'bottle_opener.n.01', 'synonyms': ['bottle_opener'], 'def': 'an opener for removing caps or corks from bottles', 'name': 'bottle_opener'}, {'frequency': 'c', 'id': 135, 'synset': 'bouquet.n.01', 'synonyms': ['bouquet'], 'def': 'an arrangement of flowers that is usually given as a present', 'name': 'bouquet'}, {'frequency': 'r', 'id': 136, 'synset': 'bow.n.04', 'synonyms': ['bow_(weapon)'], 'def': 'a weapon for shooting arrows', 'name': 'bow_(weapon)'}, {'frequency': 'f', 'id': 137, 'synset': 'bow.n.08', 'synonyms': ['bow_(decorative_ribbons)'], 'def': 'a decorative interlacing of ribbons', 'name': 'bow_(decorative_ribbons)'}, {'frequency': 'f', 'id': 138, 'synset': 'bow_tie.n.01', 'synonyms': ['bow-tie', 'bowtie'], 'def': "a man's tie that ties in a bow", 'name': 'bow-tie'}, {'frequency': 'f', 'id': 139, 'synset': 'bowl.n.03', 'synonyms': ['bowl'], 'def': 'a dish that is round and open at the top for serving foods', 'name': 'bowl'}, {'frequency': 'r', 'id': 140, 'synset': 'bowl.n.08', 'synonyms': ['pipe_bowl'], 'def': 'a small round container that is open at the top for holding tobacco', 'name': 'pipe_bowl'}, {'frequency': 'c', 'id': 141, 'synset': 'bowler_hat.n.01', 'synonyms': ['bowler_hat', 'bowler', 'derby_hat', 'derby', 'plug_hat'], 'def': 'a felt hat that is round and hard with a narrow brim', 'name': 'bowler_hat'}, {'frequency': 'r', 'id': 142, 'synset': 'bowling_ball.n.01', 'synonyms': ['bowling_ball'], 'def': 'a large ball with finger holes used in the sport of bowling', 'name': 'bowling_ball'}, {'frequency': 'r', 'id': 143, 'synset': 'bowling_pin.n.01', 'synonyms': ['bowling_pin'], 'def': 'a club-shaped wooden object used in bowling', 'name': 'bowling_pin'}, {'frequency': 'r', 'id': 144, 'synset': 'boxing_glove.n.01', 'synonyms': ['boxing_glove'], 'def': 'large glove coverings the fists of a fighter worn for the sport of boxing', 'name': 'boxing_glove'}, {'frequency': 'c', 'id': 145, 'synset': 'brace.n.06', 'synonyms': ['suspenders'], 'def': 'elastic straps that hold trousers up (usually used in the plural)', 'name': 'suspenders'}, {'frequency': 'f', 'id': 146, 'synset': 'bracelet.n.02', 'synonyms': ['bracelet', 'bangle'], 'def': 'jewelry worn around the wrist for decoration', 'name': 'bracelet'}, {'frequency': 'r', 'id': 147, 'synset': 'brass.n.07', 'synonyms': ['brass_plaque'], 'def': 'a memorial made of brass', 'name': 'brass_plaque'}, {'frequency': 'c', 'id': 148, 'synset': 'brassiere.n.01', 'synonyms': ['brassiere', 'bra', 'bandeau'], 'def': 'an undergarment worn by women to support their breasts', 'name': 'brassiere'}, {'frequency': 'c', 'id': 149, 'synset': 'bread-bin.n.01', 'synonyms': ['bread-bin', 'breadbox'], 'def': 'a container used to keep bread or cake in', 'name': 'bread-bin'}, {'frequency': 'r', 'id': 150, 'synset': 'breechcloth.n.01', 'synonyms': ['breechcloth', 'breechclout', 'loincloth'], 'def': 'a garment that provides covering for the loins', 'name': 'breechcloth'}, {'frequency': 'c', 'id': 151, 'synset': 'bridal_gown.n.01', 'synonyms': ['bridal_gown', 'wedding_gown', 'wedding_dress'], 'def': 'a gown worn by the bride at a wedding', 'name': 'bridal_gown'}, {'frequency': 'c', 'id': 152, 'synset': 'briefcase.n.01', 'synonyms': ['briefcase'], 'def': 'a case with a handle; for carrying papers or files or books', 'name': 'briefcase'}, {'frequency': 'c', 'id': 153, 'synset': 'bristle_brush.n.01', 'synonyms': ['bristle_brush'], 'def': 'a brush that is made with the short stiff hairs of an animal or 
plant', 'name': 'bristle_brush'}, {'frequency': 'f', 'id': 154, 'synset': 'broccoli.n.01', 'synonyms': ['broccoli'], 'def': 'plant with dense clusters of tight green flower buds', 'name': 'broccoli'}, {'frequency': 'r', 'id': 155, 'synset': 'brooch.n.01', 'synonyms': ['broach'], 'def': 'a decorative pin worn by women', 'name': 'broach'}, {'frequency': 'c', 'id': 156, 'synset': 'broom.n.01', 'synonyms': ['broom'], 'def': 'bundle of straws or twigs attached to a long handle; used for cleaning', 'name': 'broom'}, {'frequency': 'c', 'id': 157, 'synset': 'brownie.n.03', 'synonyms': ['brownie'], 'def': 'square or bar of very rich chocolate cake usually with nuts', 'name': 'brownie'}, {'frequency': 'c', 'id': 158, 'synset': 'brussels_sprouts.n.01', 'synonyms': ['brussels_sprouts'], 'def': 'the small edible cabbage-like buds growing along a stalk', 'name': 'brussels_sprouts'}, {'frequency': 'r', 'id': 159, 'synset': 'bubble_gum.n.01', 'synonyms': ['bubble_gum'], 'def': 'a kind of chewing gum that can be blown into bubbles', 'name': 'bubble_gum'}, {'frequency': 'f', 'id': 160, 'synset': 'bucket.n.01', 'synonyms': ['bucket', 'pail'], 'def': 'a roughly cylindrical vessel that is open at the top', 'name': 'bucket'}, {'frequency': 'r', 'id': 161, 'synset': 'buggy.n.01', 'synonyms': ['horse_buggy'], 'def': 'a small lightweight carriage; drawn by a single horse', 'name': 'horse_buggy'}, {'frequency': 'c', 'id': 162, 'synset': 'bull.n.11', 'synonyms': ['bull'], 'def': 'mature male cow', 'name': 'bull'}, {'frequency': 'r', 'id': 163, 'synset': 'bulldog.n.01', 'synonyms': ['bulldog'], 'def': 'a thickset short-haired dog with a large head and strong undershot lower jaw', 'name': 'bulldog'}, {'frequency': 'r', 'id': 164, 'synset': 'bulldozer.n.01', 'synonyms': ['bulldozer', 'dozer'], 'def': 'large powerful tractor; a large blade in front flattens areas of ground', 'name': 'bulldozer'}, {'frequency': 'c', 'id': 165, 'synset': 'bullet_train.n.01', 'synonyms': ['bullet_train'], 'def': 'a high-speed passenger train', 'name': 'bullet_train'}, {'frequency': 'c', 'id': 166, 'synset': 'bulletin_board.n.02', 'synonyms': ['bulletin_board', 'notice_board'], 'def': 'a board that hangs on a wall; displays announcements', 'name': 'bulletin_board'}, {'frequency': 'r', 'id': 167, 'synset': 'bulletproof_vest.n.01', 'synonyms': ['bulletproof_vest'], 'def': 'a vest capable of resisting the impact of a bullet', 'name': 'bulletproof_vest'}, {'frequency': 'c', 'id': 168, 'synset': 'bullhorn.n.01', 'synonyms': ['bullhorn', 'megaphone'], 'def': 'a portable loudspeaker with built-in microphone and amplifier', 'name': 'bullhorn'}, {'frequency': 'r', 'id': 169, 'synset': 'bully_beef.n.01', 'synonyms': ['corned_beef', 'corn_beef'], 'def': 'beef cured or pickled in brine', 'name': 'corned_beef'}, {'frequency': 'f', 'id': 170, 'synset': 'bun.n.01', 'synonyms': ['bun', 'roll'], 'def': 'small rounded bread either plain or sweet', 'name': 'bun'}, {'frequency': 'c', 'id': 171, 'synset': 'bunk_bed.n.01', 'synonyms': ['bunk_bed'], 'def': 'beds built one above the other', 'name': 'bunk_bed'}, {'frequency': 'f', 'id': 172, 'synset': 'buoy.n.01', 'synonyms': ['buoy'], 'def': 'a float attached by rope to the seabed to mark channels in a harbor or underwater hazards', 'name': 'buoy'}, {'frequency': 'r', 'id': 173, 'synset': 'burrito.n.01', 'synonyms': ['burrito'], 'def': 'a flour tortilla folded around a filling', 'name': 'burrito'}, {'frequency': 'f', 'id': 174, 'synset': 'bus.n.01', 'synonyms': ['bus_(vehicle)', 'autobus', 'charabanc', 
'double-decker', 'motorbus', 'motorcoach'], 'def': 'a vehicle carrying many passengers; used for public transport', 'name': 'bus_(vehicle)'}, {'frequency': 'c', 'id': 175, 'synset': 'business_card.n.01', 'synonyms': ['business_card'], 'def': "a card on which are printed the person's name and business affiliation", 'name': 'business_card'}, {'frequency': 'c', 'id': 176, 'synset': 'butcher_knife.n.01', 'synonyms': ['butcher_knife'], 'def': 'a large sharp knife for cutting or trimming meat', 'name': 'butcher_knife'}, {'frequency': 'c', 'id': 177, 'synset': 'butter.n.01', 'synonyms': ['butter'], 'def': 'an edible emulsion of fat globules made by churning milk or cream; for cooking and table use', 'name': 'butter'}, {'frequency': 'c', 'id': 178, 'synset': 'butterfly.n.01', 'synonyms': ['butterfly'], 'def': 'insect typically having a slender body with knobbed antennae and broad colorful wings', 'name': 'butterfly'}, {'frequency': 'f', 'id': 179, 'synset': 'button.n.01', 'synonyms': ['button'], 'def': 'a round fastener sewn to shirts and coats etc to fit through buttonholes', 'name': 'button'}, {'frequency': 'f', 'id': 180, 'synset': 'cab.n.03', 'synonyms': ['cab_(taxi)', 'taxi', 'taxicab'], 'def': 'a car that takes passengers where they want to go in exchange for money', 'name': 'cab_(taxi)'}, {'frequency': 'r', 'id': 181, 'synset': 'cabana.n.01', 'synonyms': ['cabana'], 'def': 'a small tent used as a dressing room beside the sea or a swimming pool', 'name': 'cabana'}, {'frequency': 'r', 'id': 182, 'synset': 'cabin_car.n.01', 'synonyms': ['cabin_car', 'caboose'], 'def': 'a car on a freight train for use of the train crew; usually the last car on the train', 'name': 'cabin_car'}, {'frequency': 'f', 'id': 183, 'synset': 'cabinet.n.01', 'synonyms': ['cabinet'], 'def': 'a piece of furniture resembling a cupboard with doors and shelves and drawers', 'name': 'cabinet'}, {'frequency': 'r', 'id': 184, 'synset': 'cabinet.n.03', 'synonyms': ['locker', 'storage_locker'], 'def': 'a storage compartment for clothes and valuables; usually it has a lock', 'name': 'locker'}, {'frequency': 'f', 'id': 185, 'synset': 'cake.n.03', 'synonyms': ['cake'], 'def': 'baked goods made from or based on a mixture of flour, sugar, eggs, and fat', 'name': 'cake'}, {'frequency': 'c', 'id': 186, 'synset': 'calculator.n.02', 'synonyms': ['calculator'], 'def': 'a small machine that is used for mathematical calculations', 'name': 'calculator'}, {'frequency': 'f', 'id': 187, 'synset': 'calendar.n.02', 'synonyms': ['calendar'], 'def': 'a list or register of events (appointments/social events/court cases, etc)', 'name': 'calendar'}, {'frequency': 'c', 'id': 188, 'synset': 'calf.n.01', 'synonyms': ['calf'], 'def': 'young of domestic cattle', 'name': 'calf'}, {'frequency': 'c', 'id': 189, 'synset': 'camcorder.n.01', 'synonyms': ['camcorder'], 'def': 'a portable television camera and videocassette recorder', 'name': 'camcorder'}, {'frequency': 'c', 'id': 190, 'synset': 'camel.n.01', 'synonyms': ['camel'], 'def': 'cud-chewing mammal used as a draft or saddle animal in desert regions', 'name': 'camel'}, {'frequency': 'f', 'id': 191, 'synset': 'camera.n.01', 'synonyms': ['camera'], 'def': 'equipment for taking photographs', 'name': 'camera'}, {'frequency': 'c', 'id': 192, 'synset': 'camera_lens.n.01', 'synonyms': ['camera_lens'], 'def': 'a lens that focuses the image in a camera', 'name': 'camera_lens'}, {'frequency': 'c', 'id': 193, 'synset': 'camper.n.02', 'synonyms': ['camper_(vehicle)', 'camping_bus', 'motor_home'], 'def': 'a 
recreational vehicle equipped for camping out while traveling', 'name': 'camper_(vehicle)'}, {'frequency': 'f', 'id': 194, 'synset': 'can.n.01', 'synonyms': ['can', 'tin_can'], 'def': 'airtight sealed metal container for food or drink or paint etc.', 'name': 'can'}, {'frequency': 'c', 'id': 195, 'synset': 'can_opener.n.01', 'synonyms': ['can_opener', 'tin_opener'], 'def': 'a device for cutting cans open', 'name': 'can_opener'}, {'frequency': 'r', 'id': 196, 'synset': 'candelabrum.n.01', 'synonyms': ['candelabrum', 'candelabra'], 'def': 'branched candlestick; ornamental; has several lights', 'name': 'candelabrum'}, {'frequency': 'f', 'id': 197, 'synset': 'candle.n.01', 'synonyms': ['candle', 'candlestick'], 'def': 'stick of wax with a wick in the middle', 'name': 'candle'}, {'frequency': 'f', 'id': 198, 'synset': 'candlestick.n.01', 'synonyms': ['candle_holder'], 'def': 'a holder with sockets for candles', 'name': 'candle_holder'}, {'frequency': 'r', 'id': 199, 'synset': 'candy_bar.n.01', 'synonyms': ['candy_bar'], 'def': 'a candy shaped as a bar', 'name': 'candy_bar'}, {'frequency': 'c', 'id': 200, 'synset': 'candy_cane.n.01', 'synonyms': ['candy_cane'], 'def': 'a hard candy in the shape of a rod (usually with stripes)', 'name': 'candy_cane'}, {'frequency': 'c', 'id': 201, 'synset': 'cane.n.01', 'synonyms': ['walking_cane'], 'def': 'a stick that people can lean on to help them walk', 'name': 'walking_cane'}, {'frequency': 'c', 'id': 202, 'synset': 'canister.n.02', 'synonyms': ['canister', 'cannister'], 'def': 'metal container for storing dry foods such as tea or flour', 'name': 'canister'}, {'frequency': 'r', 'id': 203, 'synset': 'cannon.n.02', 'synonyms': ['cannon'], 'def': 'heavy gun fired from a tank', 'name': 'cannon'}, {'frequency': 'c', 'id': 204, 'synset': 'canoe.n.01', 'synonyms': ['canoe'], 'def': 'small and light boat; pointed at both ends; propelled with a paddle', 'name': 'canoe'}, {'frequency': 'r', 'id': 205, 'synset': 'cantaloup.n.02', 'synonyms': ['cantaloup', 'cantaloupe'], 'def': 'the fruit of a cantaloup vine; small to medium-sized melon with yellowish flesh', 'name': 'cantaloup'}, {'frequency': 'r', 'id': 206, 'synset': 'canteen.n.01', 'synonyms': ['canteen'], 'def': 'a flask for carrying water; used by soldiers or travelers', 'name': 'canteen'}, {'frequency': 'c', 'id': 207, 'synset': 'cap.n.01', 'synonyms': ['cap_(headwear)'], 'def': 'a tight-fitting headwear', 'name': 'cap_(headwear)'}, {'frequency': 'f', 'id': 208, 'synset': 'cap.n.02', 'synonyms': ['bottle_cap', 'cap_(container_lid)'], 'def': 'a top (as for a bottle)', 'name': 'bottle_cap'}, {'frequency': 'r', 'id': 209, 'synset': 'cape.n.02', 'synonyms': ['cape'], 'def': 'a sleeveless garment like a cloak but shorter', 'name': 'cape'}, {'frequency': 'c', 'id': 210, 'synset': 'cappuccino.n.01', 'synonyms': ['cappuccino', 'coffee_cappuccino'], 'def': 'equal parts of espresso and steamed milk', 'name': 'cappuccino'}, {'frequency': 'f', 'id': 211, 'synset': 'car.n.01', 'synonyms': ['car_(automobile)', 'auto_(automobile)', 'automobile'], 'def': 'a motor vehicle with four wheels', 'name': 'car_(automobile)'}, {'frequency': 'f', 'id': 212, 'synset': 'car.n.02', 'synonyms': ['railcar_(part_of_a_train)', 'railway_car_(part_of_a_train)', 'railroad_car_(part_of_a_train)'], 'def': 'a wheeled vehicle adapted to the rails of railroad', 'name': 'railcar_(part_of_a_train)'}, {'frequency': 'r', 'id': 213, 'synset': 'car.n.04', 'synonyms': ['elevator_car'], 'def': 'where passengers ride up and down', 'name': 'elevator_car'}, 
{'frequency': 'r', 'id': 214, 'synset': 'car_battery.n.01', 'synonyms': ['car_battery', 'automobile_battery'], 'def': 'a battery in a motor vehicle', 'name': 'car_battery'}, {'frequency': 'c', 'id': 215, 'synset': 'card.n.02', 'synonyms': ['identity_card'], 'def': 'a card certifying the identity of the bearer', 'name': 'identity_card'}, {'frequency': 'c', 'id': 216, 'synset': 'card.n.03', 'synonyms': ['card'], 'def': 'a rectangular piece of paper used to send messages (e.g. greetings or pictures)', 'name': 'card'}, {'frequency': 'r', 'id': 217, 'synset': 'cardigan.n.01', 'synonyms': ['cardigan'], 'def': 'knitted jacket that is fastened up the front with buttons or a zipper', 'name': 'cardigan'}, {'frequency': 'r', 'id': 218, 'synset': 'cargo_ship.n.01', 'synonyms': ['cargo_ship', 'cargo_vessel'], 'def': 'a ship designed to carry cargo', 'name': 'cargo_ship'}, {'frequency': 'r', 'id': 219, 'synset': 'carnation.n.01', 'synonyms': ['carnation'], 'def': 'plant with pink to purple-red spice-scented usually double flowers', 'name': 'carnation'}, {'frequency': 'c', 'id': 220, 'synset': 'carriage.n.02', 'synonyms': ['horse_carriage'], 'def': 'a vehicle with wheels drawn by one or more horses', 'name': 'horse_carriage'}, {'frequency': 'f', 'id': 221, 'synset': 'carrot.n.01', 'synonyms': ['carrot'], 'def': 'deep orange edible root of the cultivated carrot plant', 'name': 'carrot'}, {'frequency': 'c', 'id': 222, 'synset': 'carryall.n.01', 'synonyms': ['tote_bag'], 'def': 'a capacious bag or basket', 'name': 'tote_bag'}, {'frequency': 'c', 'id': 223, 'synset': 'cart.n.01', 'synonyms': ['cart'], 'def': 'a heavy open wagon usually having two wheels and drawn by an animal', 'name': 'cart'}, {'frequency': 'c', 'id': 224, 'synset': 'carton.n.02', 'synonyms': ['carton'], 'def': 'a box made of cardboard; opens by flaps on top', 'name': 'carton'}, {'frequency': 'c', 'id': 225, 'synset': 'cash_register.n.01', 'synonyms': ['cash_register', 'register_(for_cash_transactions)'], 'def': 'a cashbox with an adding machine to register transactions', 'name': 'cash_register'}, {'frequency': 'r', 'id': 226, 'synset': 'casserole.n.01', 'synonyms': ['casserole'], 'def': 'food cooked and served in a casserole', 'name': 'casserole'}, {'frequency': 'r', 'id': 227, 'synset': 'cassette.n.01', 'synonyms': ['cassette'], 'def': 'a container that holds a magnetic tape used for recording or playing sound or video', 'name': 'cassette'}, {'frequency': 'c', 'id': 228, 'synset': 'cast.n.05', 'synonyms': ['cast', 'plaster_cast', 'plaster_bandage'], 'def': 'bandage consisting of a firm covering that immobilizes broken bones while they heal', 'name': 'cast'}, {'frequency': 'f', 'id': 229, 'synset': 'cat.n.01', 'synonyms': ['cat'], 'def': 'a domestic house cat', 'name': 'cat'}, {'frequency': 'c', 'id': 230, 'synset': 'cauliflower.n.02', 'synonyms': ['cauliflower'], 'def': 'edible compact head of white undeveloped flowers', 'name': 'cauliflower'}, {'frequency': 'r', 'id': 231, 'synset': 'caviar.n.01', 'synonyms': ['caviar', 'caviare'], 'def': "salted roe of sturgeon or other large fish; usually served as an hors d'oeuvre", 'name': 'caviar'}, {'frequency': 'c', 'id': 232, 'synset': 'cayenne.n.02', 'synonyms': ['cayenne_(spice)', 'cayenne_pepper_(spice)', 'red_pepper_(spice)'], 'def': 'ground pods and seeds of pungent red peppers of the genus Capsicum', 'name': 'cayenne_(spice)'}, {'frequency': 'c', 'id': 233, 'synset': 'cd_player.n.01', 'synonyms': ['CD_player'], 'def': 'electronic equipment for playing compact discs (CDs)', 'name': 
'CD_player'}, {'frequency': 'c', 'id': 234, 'synset': 'celery.n.01', 'synonyms': ['celery'], 'def': 'widely cultivated herb with aromatic leaf stalks that are eaten raw or cooked', 'name': 'celery'}, {'frequency': 'f', 'id': 235, 'synset': 'cellular_telephone.n.01', 'synonyms': ['cellular_telephone', 'cellular_phone', 'cellphone', 'mobile_phone', 'smart_phone'], 'def': 'a hand-held mobile telephone', 'name': 'cellular_telephone'}, {'frequency': 'r', 'id': 236, 'synset': 'chain_mail.n.01', 'synonyms': ['chain_mail', 'ring_mail', 'chain_armor', 'chain_armour', 'ring_armor', 'ring_armour'], 'def': '(Middle Ages) flexible armor made of interlinked metal rings', 'name': 'chain_mail'}, {'frequency': 'f', 'id': 237, 'synset': 'chair.n.01', 'synonyms': ['chair'], 'def': 'a seat for one person, with a support for the back', 'name': 'chair'}, {'frequency': 'r', 'id': 238, 'synset': 'chaise_longue.n.01', 'synonyms': ['chaise_longue', 'chaise', 'daybed'], 'def': 'a long chair; for reclining', 'name': 'chaise_longue'}, {'frequency': 'r', 'id': 239, 'synset': 'champagne.n.01', 'synonyms': ['champagne'], 'def': 'a white sparkling wine produced in Champagne or resembling that produced there', 'name': 'champagne'}, {'frequency': 'f', 'id': 240, 'synset': 'chandelier.n.01', 'synonyms': ['chandelier'], 'def': 'branched lighting fixture; often ornate; hangs from the ceiling', 'name': 'chandelier'}, {'frequency': 'r', 'id': 241, 'synset': 'chap.n.04', 'synonyms': ['chap'], 'def': 'leather leggings without a seat; worn over trousers by cowboys to protect their legs', 'name': 'chap'}, {'frequency': 'r', 'id': 242, 'synset': 'checkbook.n.01', 'synonyms': ['checkbook', 'chequebook'], 'def': 'a book issued to holders of checking accounts', 'name': 'checkbook'}, {'frequency': 'r', 'id': 243, 'synset': 'checkerboard.n.01', 'synonyms': ['checkerboard'], 'def': 'a board having 64 squares of two alternating colors', 'name': 'checkerboard'}, {'frequency': 'c', 'id': 244, 'synset': 'cherry.n.03', 'synonyms': ['cherry'], 'def': 'a red fruit with a single hard stone', 'name': 'cherry'}, {'frequency': 'r', 'id': 245, 'synset': 'chessboard.n.01', 'synonyms': ['chessboard'], 'def': 'a checkerboard used to play chess', 'name': 'chessboard'}, {'frequency': 'r', 'id': 246, 'synset': 'chest_of_drawers.n.01', 'synonyms': ['chest_of_drawers_(furniture)', 'bureau_(furniture)', 'chest_(furniture)'], 'def': 'furniture with drawers for keeping clothes', 'name': 'chest_of_drawers_(furniture)'}, {'frequency': 'c', 'id': 247, 'synset': 'chicken.n.02', 'synonyms': ['chicken_(animal)'], 'def': 'a domestic fowl bred for flesh or eggs', 'name': 'chicken_(animal)'}, {'frequency': 'c', 'id': 248, 'synset': 'chicken_wire.n.01', 'synonyms': ['chicken_wire'], 'def': 'a galvanized wire network with a hexagonal mesh; used to build fences', 'name': 'chicken_wire'}, {'frequency': 'r', 'id': 249, 'synset': 'chickpea.n.01', 'synonyms': ['chickpea', 'garbanzo'], 'def': 'the seed of the chickpea plant; usually dried', 'name': 'chickpea'}, {'frequency': 'r', 'id': 250, 'synset': 'chihuahua.n.03', 'synonyms': ['Chihuahua'], 'def': 'an old breed of tiny short-haired dog with protruding eyes from Mexico', 'name': 'Chihuahua'}, {'frequency': 'r', 'id': 251, 'synset': 'chili.n.02', 'synonyms': ['chili_(vegetable)', 'chili_pepper_(vegetable)', 'chilli_(vegetable)', 'chilly_(vegetable)', 'chile_(vegetable)'], 'def': 'very hot and finely tapering pepper of special pungency', 'name': 'chili_(vegetable)'}, {'frequency': 'r', 'id': 252, 'synset': 'chime.n.01', 
'synonyms': ['chime', 'gong'], 'def': 'an instrument consisting of a set of bells that are struck with a hammer', 'name': 'chime'}, {'frequency': 'r', 'id': 253, 'synset': 'chinaware.n.01', 'synonyms': ['chinaware'], 'def': 'dishware made of high quality porcelain', 'name': 'chinaware'}, {'frequency': 'c', 'id': 254, 'synset': 'chip.n.04', 'synonyms': ['crisp_(potato_chip)', 'potato_chip'], 'def': 'a thin crisp slice of potato fried in deep fat', 'name': 'crisp_(potato_chip)'}, {'frequency': 'r', 'id': 255, 'synset': 'chip.n.06', 'synonyms': ['poker_chip'], 'def': 'a small disk-shaped counter used to represent money when gambling', 'name': 'poker_chip'}, {'frequency': 'c', 'id': 256, 'synset': 'chocolate_bar.n.01', 'synonyms': ['chocolate_bar'], 'def': 'a bar of chocolate candy', 'name': 'chocolate_bar'}, {'frequency': 'c', 'id': 257, 'synset': 'chocolate_cake.n.01', 'synonyms': ['chocolate_cake'], 'def': 'cake containing chocolate', 'name': 'chocolate_cake'}, {'frequency': 'r', 'id': 258, 'synset': 'chocolate_milk.n.01', 'synonyms': ['chocolate_milk'], 'def': 'milk flavored with chocolate syrup', 'name': 'chocolate_milk'}, {'frequency': 'r', 'id': 259, 'synset': 'chocolate_mousse.n.01', 'synonyms': ['chocolate_mousse'], 'def': 'dessert mousse made with chocolate', 'name': 'chocolate_mousse'}, {'frequency': 'f', 'id': 260, 'synset': 'choker.n.03', 'synonyms': ['choker', 'collar', 'neckband'], 'def': 'necklace that fits tightly around the neck', 'name': 'choker'}, {'frequency': 'f', 'id': 261, 'synset': 'chopping_board.n.01', 'synonyms': ['chopping_board', 'cutting_board', 'chopping_block'], 'def': 'a wooden board where meats or vegetables can be cut', 'name': 'chopping_board'}, {'frequency': 'c', 'id': 262, 'synset': 'chopstick.n.01', 'synonyms': ['chopstick'], 'def': 'one of a pair of slender sticks used as oriental tableware to eat food with', 'name': 'chopstick'}, {'frequency': 'f', 'id': 263, 'synset': 'christmas_tree.n.05', 'synonyms': ['Christmas_tree'], 'def': 'an ornamented evergreen used as a Christmas decoration', 'name': 'Christmas_tree'}, {'frequency': 'c', 'id': 264, 'synset': 'chute.n.02', 'synonyms': ['slide'], 'def': 'sloping channel through which things can descend', 'name': 'slide'}, {'frequency': 'r', 'id': 265, 'synset': 'cider.n.01', 'synonyms': ['cider', 'cyder'], 'def': 'a beverage made from juice pressed from apples', 'name': 'cider'}, {'frequency': 'r', 'id': 266, 'synset': 'cigar_box.n.01', 'synonyms': ['cigar_box'], 'def': 'a box for holding cigars', 'name': 'cigar_box'}, {'frequency': 'c', 'id': 267, 'synset': 'cigarette.n.01', 'synonyms': ['cigarette'], 'def': 'finely ground tobacco wrapped in paper; for smoking', 'name': 'cigarette'}, {'frequency': 'c', 'id': 268, 'synset': 'cigarette_case.n.01', 'synonyms': ['cigarette_case', 'cigarette_pack'], 'def': 'a small flat case for holding cigarettes', 'name': 'cigarette_case'}, {'frequency': 'f', 'id': 269, 'synset': 'cistern.n.02', 'synonyms': ['cistern', 'water_tank'], 'def': 'a tank that holds the water used to flush a toilet', 'name': 'cistern'}, {'frequency': 'r', 'id': 270, 'synset': 'clarinet.n.01', 'synonyms': ['clarinet'], 'def': 'a single-reed instrument with a straight tube', 'name': 'clarinet'}, {'frequency': 'r', 'id': 271, 'synset': 'clasp.n.01', 'synonyms': ['clasp'], 'def': 'a fastener (as a buckle or hook) that is used to hold two things together', 'name': 'clasp'}, {'frequency': 'c', 'id': 272, 'synset': 'cleansing_agent.n.01', 'synonyms': ['cleansing_agent', 'cleanser', 'cleaner'], 'def': 'a 
preparation used in cleaning something', 'name': 'cleansing_agent'}, {'frequency': 'r', 'id': 273, 'synset': 'clementine.n.01', 'synonyms': ['clementine'], 'def': 'a variety of mandarin orange', 'name': 'clementine'}, {'frequency': 'c', 'id': 274, 'synset': 'clip.n.03', 'synonyms': ['clip'], 'def': 'any of various small fasteners used to hold loose articles together', 'name': 'clip'}, {'frequency': 'c', 'id': 275, 'synset': 'clipboard.n.01', 'synonyms': ['clipboard'], 'def': 'a small writing board with a clip at the top for holding papers', 'name': 'clipboard'}, {'frequency': 'f', 'id': 276, 'synset': 'clock.n.01', 'synonyms': ['clock', 'timepiece', 'timekeeper'], 'def': 'a timepiece that shows the time of day', 'name': 'clock'}, {'frequency': 'f', 'id': 277, 'synset': 'clock_tower.n.01', 'synonyms': ['clock_tower'], 'def': 'a tower with a large clock visible high up on an outside face', 'name': 'clock_tower'}, {'frequency': 'c', 'id': 278, 'synset': 'clothes_hamper.n.01', 'synonyms': ['clothes_hamper', 'laundry_basket', 'clothes_basket'], 'def': 'a hamper that holds dirty clothes to be washed or wet clothes to be dried', 'name': 'clothes_hamper'}, {'frequency': 'c', 'id': 279, 'synset': 'clothespin.n.01', 'synonyms': ['clothespin', 'clothes_peg'], 'def': 'wood or plastic fastener; for holding clothes on a clothesline', 'name': 'clothespin'}, {'frequency': 'r', 'id': 280, 'synset': 'clutch_bag.n.01', 'synonyms': ['clutch_bag'], 'def': "a woman's strapless purse that is carried in the hand", 'name': 'clutch_bag'}, {'frequency': 'f', 'id': 281, 'synset': 'coaster.n.03', 'synonyms': ['coaster'], 'def': 'a covering (plate or mat) that protects the surface of a table', 'name': 'coaster'}, {'frequency': 'f', 'id': 282, 'synset': 'coat.n.01', 'synonyms': ['coat'], 'def': 'an outer garment that has sleeves and covers the body from shoulder down', 'name': 'coat'}, {'frequency': 'c', 'id': 283, 'synset': 'coat_hanger.n.01', 'synonyms': ['coat_hanger', 'clothes_hanger', 'dress_hanger'], 'def': "a hanger that is shaped like a person's shoulders", 'name': 'coat_hanger'}, {'frequency': 'r', 'id': 284, 'synset': 'coatrack.n.01', 'synonyms': ['coatrack', 'hatrack'], 'def': 'a rack with hooks for temporarily holding coats and hats', 'name': 'coatrack'}, {'frequency': 'c', 'id': 285, 'synset': 'cock.n.04', 'synonyms': ['cock', 'rooster'], 'def': 'adult male chicken', 'name': 'cock'}, {'frequency': 'c', 'id': 286, 'synset': 'coconut.n.02', 'synonyms': ['coconut', 'cocoanut'], 'def': 'large hard-shelled brown oval nut with a fibrous husk', 'name': 'coconut'}, {'frequency': 'r', 'id': 287, 'synset': 'coffee_filter.n.01', 'synonyms': ['coffee_filter'], 'def': 'filter (usually of paper) that passes the coffee and retains the coffee grounds', 'name': 'coffee_filter'}, {'frequency': 'f', 'id': 288, 'synset': 'coffee_maker.n.01', 'synonyms': ['coffee_maker', 'coffee_machine'], 'def': 'a kitchen appliance for brewing coffee automatically', 'name': 'coffee_maker'}, {'frequency': 'f', 'id': 289, 'synset': 'coffee_table.n.01', 'synonyms': ['coffee_table', 'cocktail_table'], 'def': 'low table where magazines can be placed and coffee or cocktails are served', 'name': 'coffee_table'}, {'frequency': 'c', 'id': 290, 'synset': 'coffeepot.n.01', 'synonyms': ['coffeepot'], 'def': 'tall pot in which coffee is brewed', 'name': 'coffeepot'}, {'frequency': 'r', 'id': 291, 'synset': 'coil.n.05', 'synonyms': ['coil'], 'def': 'tubing that is wound in a spiral', 'name': 'coil'}, {'frequency': 'c', 'id': 292, 'synset': 'coin.n.01', 
'synonyms': ['coin'], 'def': 'a flat metal piece (usually a disc) used as money', 'name': 'coin'}, {'frequency': 'r', 'id': 293, 'synset': 'colander.n.01', 'synonyms': ['colander', 'cullender'], 'def': 'bowl-shaped strainer; used to wash or drain foods', 'name': 'colander'}, {'frequency': 'c', 'id': 294, 'synset': 'coleslaw.n.01', 'synonyms': ['coleslaw', 'slaw'], 'def': 'basically shredded cabbage', 'name': 'coleslaw'}, {'frequency': 'r', 'id': 295, 'synset': 'coloring_material.n.01', 'synonyms': ['coloring_material', 'colouring_material'], 'def': 'any material used for its color', 'name': 'coloring_material'}, {'frequency': 'r', 'id': 296, 'synset': 'combination_lock.n.01', 'synonyms': ['combination_lock'], 'def': 'lock that can be opened only by turning dials in a special sequence', 'name': 'combination_lock'}, {'frequency': 'c', 'id': 297, 'synset': 'comforter.n.04', 'synonyms': ['pacifier', 'teething_ring'], 'def': 'device used for an infant to suck or bite on', 'name': 'pacifier'}, {'frequency': 'r', 'id': 298, 'synset': 'comic_book.n.01', 'synonyms': ['comic_book'], 'def': 'a magazine devoted to comic strips', 'name': 'comic_book'}, {'frequency': 'f', 'id': 299, 'synset': 'computer_keyboard.n.01', 'synonyms': ['computer_keyboard', 'keyboard_(computer)'], 'def': 'a keyboard that is a data input device for computers', 'name': 'computer_keyboard'}, {'frequency': 'r', 'id': 300, 'synset': 'concrete_mixer.n.01', 'synonyms': ['concrete_mixer', 'cement_mixer'], 'def': 'a machine with a large revolving drum in which cement/concrete is mixed', 'name': 'concrete_mixer'}, {'frequency': 'f', 'id': 301, 'synset': 'cone.n.01', 'synonyms': ['cone', 'traffic_cone'], 'def': 'a cone-shaped object used to direct traffic', 'name': 'cone'}, {'frequency': 'f', 'id': 302, 'synset': 'control.n.09', 'synonyms': ['control', 'controller'], 'def': 'a mechanism that controls the operation of a machine', 'name': 'control'}, {'frequency': 'r', 'id': 303, 'synset': 'convertible.n.01', 'synonyms': ['convertible_(automobile)'], 'def': 'a car that has top that can be folded or removed', 'name': 'convertible_(automobile)'}, {'frequency': 'r', 'id': 304, 'synset': 'convertible.n.03', 'synonyms': ['sofa_bed'], 'def': 'a sofa that can be converted into a bed', 'name': 'sofa_bed'}, {'frequency': 'c', 'id': 305, 'synset': 'cookie.n.01', 'synonyms': ['cookie', 'cooky', 'biscuit_(cookie)'], 'def': "any of various small flat sweet cakes (`biscuit' is the British term)", 'name': 'cookie'}, {'frequency': 'r', 'id': 306, 'synset': 'cookie_jar.n.01', 'synonyms': ['cookie_jar', 'cooky_jar'], 'def': 'a jar in which cookies are kept (and sometimes money is hidden)', 'name': 'cookie_jar'}, {'frequency': 'r', 'id': 307, 'synset': 'cooking_utensil.n.01', 'synonyms': ['cooking_utensil'], 'def': 'a kitchen utensil made of material that does not melt easily; used for cooking', 'name': 'cooking_utensil'}, {'frequency': 'f', 'id': 308, 'synset': 'cooler.n.01', 'synonyms': ['cooler_(for_food)', 'ice_chest'], 'def': 'an insulated box for storing food often with ice', 'name': 'cooler_(for_food)'}, {'frequency': 'c', 'id': 309, 'synset': 'cork.n.04', 'synonyms': ['cork_(bottle_plug)', 'bottle_cork'], 'def': 'the plug in the mouth of a bottle (especially a wine bottle)', 'name': 'cork_(bottle_plug)'}, {'frequency': 'r', 'id': 310, 'synset': 'corkboard.n.01', 'synonyms': ['corkboard'], 'def': 'a sheet consisting of cork granules', 'name': 'corkboard'}, {'frequency': 'r', 'id': 311, 'synset': 'corkscrew.n.01', 'synonyms': ['corkscrew', 
'bottle_screw'], 'def': 'a bottle opener that pulls corks', 'name': 'corkscrew'}, {'frequency': 'c', 'id': 312, 'synset': 'corn.n.03', 'synonyms': ['edible_corn', 'corn', 'maize'], 'def': 'ears of corn that can be prepared and served for human food', 'name': 'edible_corn'}, {'frequency': 'r', 'id': 313, 'synset': 'cornbread.n.01', 'synonyms': ['cornbread'], 'def': 'bread made primarily of cornmeal', 'name': 'cornbread'}, {'frequency': 'c', 'id': 314, 'synset': 'cornet.n.01', 'synonyms': ['cornet', 'horn', 'trumpet'], 'def': 'a brass musical instrument with a narrow tube and a flared bell and many valves', 'name': 'cornet'}, {'frequency': 'c', 'id': 315, 'synset': 'cornice.n.01', 'synonyms': ['cornice', 'valance', 'valance_board', 'pelmet'], 'def': 'a decorative framework to conceal curtain fixtures at the top of a window casing', 'name': 'cornice'}, {'frequency': 'r', 'id': 316, 'synset': 'cornmeal.n.01', 'synonyms': ['cornmeal'], 'def': 'coarsely ground corn', 'name': 'cornmeal'}, {'frequency': 'r', 'id': 317, 'synset': 'corset.n.01', 'synonyms': ['corset', 'girdle'], 'def': "a woman's close-fitting foundation garment", 'name': 'corset'}, {'frequency': 'r', 'id': 318, 'synset': 'cos.n.02', 'synonyms': ['romaine_lettuce'], 'def': 'lettuce with long dark-green leaves in a loosely packed elongated head', 'name': 'romaine_lettuce'}, {'frequency': 'c', 'id': 319, 'synset': 'costume.n.04', 'synonyms': ['costume'], 'def': 'the attire characteristic of a country or a time or a social class', 'name': 'costume'}, {'frequency': 'r', 'id': 320, 'synset': 'cougar.n.01', 'synonyms': ['cougar', 'puma', 'catamount', 'mountain_lion', 'panther'], 'def': 'large American feline resembling a lion', 'name': 'cougar'}, {'frequency': 'r', 'id': 321, 'synset': 'coverall.n.01', 'synonyms': ['coverall'], 'def': 'a loose-fitting protective garment that is worn over other clothing', 'name': 'coverall'}, {'frequency': 'r', 'id': 322, 'synset': 'cowbell.n.01', 'synonyms': ['cowbell'], 'def': 'a bell hung around the neck of cow so that the cow can be easily located', 'name': 'cowbell'}, {'frequency': 'f', 'id': 323, 'synset': 'cowboy_hat.n.01', 'synonyms': ['cowboy_hat', 'ten-gallon_hat'], 'def': 'a hat with a wide brim and a soft crown; worn by American ranch hands', 'name': 'cowboy_hat'}, {'frequency': 'r', 'id': 324, 'synset': 'crab.n.01', 'synonyms': ['crab_(animal)'], 'def': 'decapod having eyes on short stalks and a broad flattened shell and pincers', 'name': 'crab_(animal)'}, {'frequency': 'c', 'id': 325, 'synset': 'cracker.n.01', 'synonyms': ['cracker'], 'def': 'a thin crisp wafer', 'name': 'cracker'}, {'frequency': 'r', 'id': 326, 'synset': 'crape.n.01', 'synonyms': ['crape', 'crepe', 'French_pancake'], 'def': 'small very thin pancake', 'name': 'crape'}, {'frequency': 'f', 'id': 327, 'synset': 'crate.n.01', 'synonyms': ['crate'], 'def': 'a rugged box (usually made of wood); used for shipping', 'name': 'crate'}, {'frequency': 'r', 'id': 328, 'synset': 'crayon.n.01', 'synonyms': ['crayon', 'wax_crayon'], 'def': 'writing or drawing implement made of a colored stick of composition wax', 'name': 'crayon'}, {'frequency': 'r', 'id': 329, 'synset': 'cream_pitcher.n.01', 'synonyms': ['cream_pitcher'], 'def': 'a small pitcher for serving cream', 'name': 'cream_pitcher'}, {'frequency': 'r', 'id': 330, 'synset': 'credit_card.n.01', 'synonyms': ['credit_card', 'charge_card', 'debit_card'], 'def': 'a card, usually plastic, used to pay for goods and services', 'name': 'credit_card'}, {'frequency': 'c', 'id': 331, 'synset': 
'crescent_roll.n.01', 'synonyms': ['crescent_roll', 'croissant'], 'def': 'very rich flaky crescent-shaped roll', 'name': 'crescent_roll'}, {'frequency': 'c', 'id': 332, 'synset': 'crib.n.01', 'synonyms': ['crib', 'cot'], 'def': 'baby bed with high sides made of slats', 'name': 'crib'}, {'frequency': 'c', 'id': 333, 'synset': 'crock.n.03', 'synonyms': ['crock_pot', 'earthenware_jar'], 'def': 'an earthen jar (made of baked clay)', 'name': 'crock_pot'}, {'frequency': 'f', 'id': 334, 'synset': 'crossbar.n.01', 'synonyms': ['crossbar'], 'def': 'a horizontal bar that goes across something', 'name': 'crossbar'}, {'frequency': 'r', 'id': 335, 'synset': 'crouton.n.01', 'synonyms': ['crouton'], 'def': 'a small piece of toasted or fried bread; served in soup or salads', 'name': 'crouton'}, {'frequency': 'r', 'id': 336, 'synset': 'crow.n.01', 'synonyms': ['crow'], 'def': 'black birds having a raucous call', 'name': 'crow'}, {'frequency': 'c', 'id': 337, 'synset': 'crown.n.04', 'synonyms': ['crown'], 'def': 'an ornamental jeweled headdress signifying sovereignty', 'name': 'crown'}, {'frequency': 'c', 'id': 338, 'synset': 'crucifix.n.01', 'synonyms': ['crucifix'], 'def': 'representation of the cross on which Jesus died', 'name': 'crucifix'}, {'frequency': 'c', 'id': 339, 'synset': 'cruise_ship.n.01', 'synonyms': ['cruise_ship', 'cruise_liner'], 'def': 'a passenger ship used commercially for pleasure cruises', 'name': 'cruise_ship'}, {'frequency': 'c', 'id': 340, 'synset': 'cruiser.n.01', 'synonyms': ['police_cruiser', 'patrol_car', 'police_car', 'squad_car'], 'def': 'a car in which policemen cruise the streets', 'name': 'police_cruiser'}, {'frequency': 'c', 'id': 341, 'synset': 'crumb.n.03', 'synonyms': ['crumb'], 'def': 'small piece of e.g. bread or cake', 'name': 'crumb'}, {'frequency': 'r', 'id': 342, 'synset': 'crutch.n.01', 'synonyms': ['crutch'], 'def': 'a wooden or metal staff that fits under the armpit and reaches to the ground', 'name': 'crutch'}, {'frequency': 'c', 'id': 343, 'synset': 'cub.n.03', 'synonyms': ['cub_(animal)'], 'def': 'the young of certain carnivorous mammals such as the bear or wolf or lion', 'name': 'cub_(animal)'}, {'frequency': 'r', 'id': 344, 'synset': 'cube.n.05', 'synonyms': ['cube', 'square_block'], 'def': 'a block in the (approximate) shape of a cube', 'name': 'cube'}, {'frequency': 'f', 'id': 345, 'synset': 'cucumber.n.02', 'synonyms': ['cucumber', 'cuke'], 'def': 'cylindrical green fruit with thin green rind and white flesh eaten as a vegetable', 'name': 'cucumber'}, {'frequency': 'c', 'id': 346, 'synset': 'cufflink.n.01', 'synonyms': ['cufflink'], 'def': 'jewelry consisting of linked buttons used to fasten the cuffs of a shirt', 'name': 'cufflink'}, {'frequency': 'f', 'id': 347, 'synset': 'cup.n.01', 'synonyms': ['cup'], 'def': 'a small open container usually used for drinking; usually has a handle', 'name': 'cup'}, {'frequency': 'c', 'id': 348, 'synset': 'cup.n.08', 'synonyms': ['trophy_cup'], 'def': 'a metal vessel with handles that is awarded as a trophy to a competition winner', 'name': 'trophy_cup'}, {'frequency': 'c', 'id': 349, 'synset': 'cupcake.n.01', 'synonyms': ['cupcake'], 'def': 'small cake baked in a muffin tin', 'name': 'cupcake'}, {'frequency': 'r', 'id': 350, 'synset': 'curler.n.01', 'synonyms': ['hair_curler', 'hair_roller', 'hair_crimper'], 'def': 'a cylindrical tube around which the hair is wound to curl it', 'name': 'hair_curler'}, {'frequency': 'r', 'id': 351, 'synset': 'curling_iron.n.01', 'synonyms': ['curling_iron'], 'def': 'a cylindrical 
home appliance that heats hair that has been curled around it', 'name': 'curling_iron'}, {'frequency': 'f', 'id': 352, 'synset': 'curtain.n.01', 'synonyms': ['curtain', 'drapery'], 'def': 'hanging cloth used as a blind (especially for a window)', 'name': 'curtain'}, {'frequency': 'f', 'id': 353, 'synset': 'cushion.n.03', 'synonyms': ['cushion'], 'def': 'a soft bag filled with air or padding such as feathers or foam rubber', 'name': 'cushion'}, {'frequency': 'r', 'id': 354, 'synset': 'custard.n.01', 'synonyms': ['custard'], 'def': 'sweetened mixture of milk and eggs baked or boiled or frozen', 'name': 'custard'}, {'frequency': 'c', 'id': 355, 'synset': 'cutter.n.06', 'synonyms': ['cutting_tool'], 'def': 'a cutting implement; a tool for cutting', 'name': 'cutting_tool'}, {'frequency': 'r', 'id': 356, 'synset': 'cylinder.n.04', 'synonyms': ['cylinder'], 'def': 'a cylindrical container', 'name': 'cylinder'}, {'frequency': 'r', 'id': 357, 'synset': 'cymbal.n.01', 'synonyms': ['cymbal'], 'def': 'a percussion instrument consisting of a concave brass disk', 'name': 'cymbal'}, {'frequency': 'r', 'id': 358, 'synset': 'dachshund.n.01', 'synonyms': ['dachshund', 'dachsie', 'badger_dog'], 'def': 'small long-bodied short-legged breed of dog having a short sleek coat and long drooping ears', 'name': 'dachshund'}, {'frequency': 'r', 'id': 359, 'synset': 'dagger.n.01', 'synonyms': ['dagger'], 'def': 'a short knife with a pointed blade used for piercing or stabbing', 'name': 'dagger'}, {'frequency': 'r', 'id': 360, 'synset': 'dartboard.n.01', 'synonyms': ['dartboard'], 'def': 'a circular board of wood or cork used as the target in the game of darts', 'name': 'dartboard'}, {'frequency': 'r', 'id': 361, 'synset': 'date.n.08', 'synonyms': ['date_(fruit)'], 'def': 'sweet edible fruit of the date palm with a single long woody seed', 'name': 'date_(fruit)'}, {'frequency': 'f', 'id': 362, 'synset': 'deck_chair.n.01', 'synonyms': ['deck_chair', 'beach_chair'], 'def': 'a folding chair for use outdoors; a wooden frame supports a length of canvas', 'name': 'deck_chair'}, {'frequency': 'c', 'id': 363, 'synset': 'deer.n.01', 'synonyms': ['deer', 'cervid'], 'def': "distinguished from Bovidae by the male's having solid deciduous antlers", 'name': 'deer'}, {'frequency': 'c', 'id': 364, 'synset': 'dental_floss.n.01', 'synonyms': ['dental_floss', 'floss'], 'def': 'a soft thread for cleaning the spaces between the teeth', 'name': 'dental_floss'}, {'frequency': 'f', 'id': 365, 'synset': 'desk.n.01', 'synonyms': ['desk'], 'def': 'a piece of furniture with a writing surface and usually drawers or other compartments', 'name': 'desk'}, {'frequency': 'r', 'id': 366, 'synset': 'detergent.n.01', 'synonyms': ['detergent'], 'def': 'a surface-active chemical widely used in industry and laundering', 'name': 'detergent'}, {'frequency': 'c', 'id': 367, 'synset': 'diaper.n.01', 'synonyms': ['diaper'], 'def': 'garment consisting of a folded cloth drawn up between the legs and fastened at the waist', 'name': 'diaper'}, {'frequency': 'r', 'id': 368, 'synset': 'diary.n.01', 'synonyms': ['diary', 'journal'], 'def': 'a daily written record of (usually personal) experiences and observations', 'name': 'diary'}, {'frequency': 'r', 'id': 369, 'synset': 'die.n.01', 'synonyms': ['die', 'dice'], 'def': 'a small cube with 1 to 6 spots on the six faces; used in gambling', 'name': 'die'}, {'frequency': 'r', 'id': 370, 'synset': 'dinghy.n.01', 'synonyms': ['dinghy', 'dory', 'rowboat'], 'def': 'a small boat of shallow draft with seats and oars with which it 
is propelled', 'name': 'dinghy'}, {'frequency': 'f', 'id': 371, 'synset': 'dining_table.n.01', 'synonyms': ['dining_table'], 'def': 'a table at which meals are served', 'name': 'dining_table'}, {'frequency': 'r', 'id': 372, 'synset': 'dinner_jacket.n.01', 'synonyms': ['tux', 'tuxedo'], 'def': 'semiformal evening dress for men', 'name': 'tux'}, {'frequency': 'c', 'id': 373, 'synset': 'dish.n.01', 'synonyms': ['dish'], 'def': 'a piece of dishware normally used as a container for holding or serving food', 'name': 'dish'}, {'frequency': 'c', 'id': 374, 'synset': 'dish.n.05', 'synonyms': ['dish_antenna'], 'def': 'directional antenna consisting of a parabolic reflector', 'name': 'dish_antenna'}, {'frequency': 'c', 'id': 375, 'synset': 'dishrag.n.01', 'synonyms': ['dishrag', 'dishcloth'], 'def': 'a cloth for washing dishes', 'name': 'dishrag'}, {'frequency': 'c', 'id': 376, 'synset': 'dishtowel.n.01', 'synonyms': ['dishtowel', 'tea_towel'], 'def': 'a towel for drying dishes', 'name': 'dishtowel'}, {'frequency': 'f', 'id': 377, 'synset': 'dishwasher.n.01', 'synonyms': ['dishwasher', 'dishwashing_machine'], 'def': 'a machine for washing dishes', 'name': 'dishwasher'}, {'frequency': 'r', 'id': 378, 'synset': 'dishwasher_detergent.n.01', 'synonyms': ['dishwasher_detergent', 'dishwashing_detergent', 'dishwashing_liquid'], 'def': 'a low-sudsing detergent designed for use in dishwashers', 'name': 'dishwasher_detergent'}, {'frequency': 'r', 'id': 379, 'synset': 'diskette.n.01', 'synonyms': ['diskette', 'floppy', 'floppy_disk'], 'def': 'a small plastic magnetic disk enclosed in a stiff envelope used to store data', 'name': 'diskette'}, {'frequency': 'c', 'id': 380, 'synset': 'dispenser.n.01', 'synonyms': ['dispenser'], 'def': 'a container so designed that the contents can be used in prescribed amounts', 'name': 'dispenser'}, {'frequency': 'c', 'id': 381, 'synset': 'dixie_cup.n.01', 'synonyms': ['Dixie_cup', 'paper_cup'], 'def': 'a disposable cup made of paper; for holding drinks', 'name': 'Dixie_cup'}, {'frequency': 'f', 'id': 382, 'synset': 'dog.n.01', 'synonyms': ['dog'], 'def': 'a common domesticated dog', 'name': 'dog'}, {'frequency': 'f', 'id': 383, 'synset': 'dog_collar.n.01', 'synonyms': ['dog_collar'], 'def': 'a collar for a dog', 'name': 'dog_collar'}, {'frequency': 'c', 'id': 384, 'synset': 'doll.n.01', 'synonyms': ['doll'], 'def': 'a toy replica of a HUMAN (NOT AN ANIMAL)', 'name': 'doll'}, {'frequency': 'r', 'id': 385, 'synset': 'dollar.n.02', 'synonyms': ['dollar', 'dollar_bill', 'one_dollar_bill'], 'def': 'a piece of paper money worth one dollar', 'name': 'dollar'}, {'frequency': 'r', 'id': 386, 'synset': 'dolphin.n.02', 'synonyms': ['dolphin'], 'def': 'any of various small toothed whales with a beaklike snout; larger than porpoises', 'name': 'dolphin'}, {'frequency': 'c', 'id': 387, 'synset': 'domestic_ass.n.01', 'synonyms': ['domestic_ass', 'donkey'], 'def': 'domestic beast of burden descended from the African wild ass; patient but stubborn', 'name': 'domestic_ass'}, {'frequency': 'r', 'id': 388, 'synset': 'domino.n.03', 'synonyms': ['eye_mask'], 'def': 'a mask covering the upper part of the face but with holes for the eyes', 'name': 'eye_mask'}, {'frequency': 'r', 'id': 389, 'synset': 'doorbell.n.01', 'synonyms': ['doorbell', 'buzzer'], 'def': 'a button at an outer door that gives a ringing or buzzing signal when pushed', 'name': 'doorbell'}, {'frequency': 'f', 'id': 390, 'synset': 'doorknob.n.01', 'synonyms': ['doorknob', 'doorhandle'], 'def': "a knob used to open a door (often called 
`doorhandle' in Great Britain)", 'name': 'doorknob'}, {'frequency': 'c', 'id': 391, 'synset': 'doormat.n.02', 'synonyms': ['doormat', 'welcome_mat'], 'def': 'a mat placed outside an exterior door for wiping the shoes before entering', 'name': 'doormat'}, {'frequency': 'f', 'id': 392, 'synset': 'doughnut.n.02', 'synonyms': ['doughnut', 'donut'], 'def': 'a small ring-shaped friedcake', 'name': 'doughnut'}, {'frequency': 'r', 'id': 393, 'synset': 'dove.n.01', 'synonyms': ['dove'], 'def': 'any of numerous small pigeons', 'name': 'dove'}, {'frequency': 'r', 'id': 394, 'synset': 'dragonfly.n.01', 'synonyms': ['dragonfly'], 'def': 'slender-bodied non-stinging insect having iridescent wings that are outspread at rest', 'name': 'dragonfly'}, {'frequency': 'f', 'id': 395, 'synset': 'drawer.n.01', 'synonyms': ['drawer'], 'def': 'a boxlike container in a piece of furniture; made so as to slide in and out', 'name': 'drawer'}, {'frequency': 'c', 'id': 396, 'synset': 'drawers.n.01', 'synonyms': ['underdrawers', 'boxers', 'boxershorts'], 'def': 'underpants worn by men', 'name': 'underdrawers'}, {'frequency': 'f', 'id': 397, 'synset': 'dress.n.01', 'synonyms': ['dress', 'frock'], 'def': 'a one-piece garment for a woman; has skirt and bodice', 'name': 'dress'}, {'frequency': 'c', 'id': 398, 'synset': 'dress_hat.n.01', 'synonyms': ['dress_hat', 'high_hat', 'opera_hat', 'silk_hat', 'top_hat'], 'def': "a man's hat with a tall crown; usually covered with silk or with beaver fur", 'name': 'dress_hat'}, {'frequency': 'c', 'id': 399, 'synset': 'dress_suit.n.01', 'synonyms': ['dress_suit'], 'def': 'formalwear consisting of full evening dress for men', 'name': 'dress_suit'}, {'frequency': 'c', 'id': 400, 'synset': 'dresser.n.05', 'synonyms': ['dresser'], 'def': 'a cabinet with shelves', 'name': 'dresser'}, {'frequency': 'c', 'id': 401, 'synset': 'drill.n.01', 'synonyms': ['drill'], 'def': 'a tool with a sharp rotating point for making holes in hard materials', 'name': 'drill'}, {'frequency': 'r', 'id': 402, 'synset': 'drinking_fountain.n.01', 'synonyms': ['drinking_fountain'], 'def': 'a public fountain to provide a jet of drinking water', 'name': 'drinking_fountain'}, {'frequency': 'r', 'id': 403, 'synset': 'drone.n.04', 'synonyms': ['drone'], 'def': 'an aircraft without a pilot that is operated by remote control', 'name': 'drone'}, {'frequency': 'r', 'id': 404, 'synset': 'dropper.n.01', 'synonyms': ['dropper', 'eye_dropper'], 'def': 'pipet consisting of a small tube with a vacuum bulb at one end for drawing liquid in and releasing it a drop at a time', 'name': 'dropper'}, {'frequency': 'c', 'id': 405, 'synset': 'drum.n.01', 'synonyms': ['drum_(musical_instrument)'], 'def': 'a musical percussion instrument; usually consists of a hollow cylinder with a membrane stretched across each end', 'name': 'drum_(musical_instrument)'}, {'frequency': 'r', 'id': 406, 'synset': 'drumstick.n.02', 'synonyms': ['drumstick'], 'def': 'a stick used for playing a drum', 'name': 'drumstick'}, {'frequency': 'f', 'id': 407, 'synset': 'duck.n.01', 'synonyms': ['duck'], 'def': 'small web-footed broad-billed swimming bird', 'name': 'duck'}, {'frequency': 'r', 'id': 408, 'synset': 'duckling.n.02', 'synonyms': ['duckling'], 'def': 'young duck', 'name': 'duckling'}, {'frequency': 'c', 'id': 409, 'synset': 'duct_tape.n.01', 'synonyms': ['duct_tape'], 'def': 'a wide silvery adhesive tape', 'name': 'duct_tape'}, {'frequency': 'f', 'id': 410, 'synset': 'duffel_bag.n.01', 'synonyms': ['duffel_bag', 'duffle_bag', 'duffel', 'duffle'], 'def': 'a large 
cylindrical bag of heavy cloth', 'name': 'duffel_bag'}, {'frequency': 'r', 'id': 411, 'synset': 'dumbbell.n.01', 'synonyms': ['dumbbell'], 'def': 'an exercising weight with two ball-like ends connected by a short handle', 'name': 'dumbbell'}, {'frequency': 'c', 'id': 412, 'synset': 'dumpster.n.01', 'synonyms': ['dumpster'], 'def': 'a container designed to receive and transport and dump waste', 'name': 'dumpster'}, {'frequency': 'r', 'id': 413, 'synset': 'dustpan.n.02', 'synonyms': ['dustpan'], 'def': 'a short-handled receptacle into which dust can be swept', 'name': 'dustpan'}, {'frequency': 'r', 'id': 414, 'synset': 'dutch_oven.n.02', 'synonyms': ['Dutch_oven'], 'def': 'iron or earthenware cooking pot; used for stews', 'name': 'Dutch_oven'}, {'frequency': 'c', 'id': 415, 'synset': 'eagle.n.01', 'synonyms': ['eagle'], 'def': 'large birds of prey noted for their broad wings and strong soaring flight', 'name': 'eagle'}, {'frequency': 'f', 'id': 416, 'synset': 'earphone.n.01', 'synonyms': ['earphone', 'earpiece', 'headphone'], 'def': 'device for listening to audio that is held over or inserted into the ear', 'name': 'earphone'}, {'frequency': 'r', 'id': 417, 'synset': 'earplug.n.01', 'synonyms': ['earplug'], 'def': 'a soft plug that is inserted into the ear canal to block sound', 'name': 'earplug'}, {'frequency': 'f', 'id': 418, 'synset': 'earring.n.01', 'synonyms': ['earring'], 'def': 'jewelry to ornament the ear', 'name': 'earring'}, {'frequency': 'c', 'id': 419, 'synset': 'easel.n.01', 'synonyms': ['easel'], 'def': "an upright tripod for displaying something (usually an artist's canvas)", 'name': 'easel'}, {'frequency': 'r', 'id': 420, 'synset': 'eclair.n.01', 'synonyms': ['eclair'], 'def': 'oblong cream puff', 'name': 'eclair'}, {'frequency': 'r', 'id': 421, 'synset': 'eel.n.01', 'synonyms': ['eel'], 'def': 'an elongate fish with fatty flesh', 'name': 'eel'}, {'frequency': 'f', 'id': 422, 'synset': 'egg.n.02', 'synonyms': ['egg', 'eggs'], 'def': 'oval reproductive body of a fowl (especially a hen) used as food', 'name': 'egg'}, {'frequency': 'r', 'id': 423, 'synset': 'egg_roll.n.01', 'synonyms': ['egg_roll', 'spring_roll'], 'def': 'minced vegetables and meat wrapped in a pancake and fried', 'name': 'egg_roll'}, {'frequency': 'c', 'id': 424, 'synset': 'egg_yolk.n.01', 'synonyms': ['egg_yolk', 'yolk_(egg)'], 'def': 'the yellow spherical part of an egg', 'name': 'egg_yolk'}, {'frequency': 'c', 'id': 425, 'synset': 'eggbeater.n.02', 'synonyms': ['eggbeater', 'eggwhisk'], 'def': 'a mixer for beating eggs or whipping cream', 'name': 'eggbeater'}, {'frequency': 'c', 'id': 426, 'synset': 'eggplant.n.01', 'synonyms': ['eggplant', 'aubergine'], 'def': 'egg-shaped vegetable having a shiny skin typically dark purple', 'name': 'eggplant'}, {'frequency': 'r', 'id': 427, 'synset': 'electric_chair.n.01', 'synonyms': ['electric_chair'], 'def': 'a chair-shaped instrument of execution by electrocution', 'name': 'electric_chair'}, {'frequency': 'f', 'id': 428, 'synset': 'electric_refrigerator.n.01', 'synonyms': ['refrigerator'], 'def': 'a refrigerator in which the coolant is pumped around by an electric motor', 'name': 'refrigerator'}, {'frequency': 'f', 'id': 429, 'synset': 'elephant.n.01', 'synonyms': ['elephant'], 'def': 'a common elephant', 'name': 'elephant'}, {'frequency': 'r', 'id': 430, 'synset': 'elk.n.01', 'synonyms': ['elk', 'moose'], 'def': 'large northern deer with enormous flattened antlers in the male', 'name': 'elk'}, {'frequency': 'c', 'id': 431, 'synset': 'envelope.n.01', 'synonyms': 
['envelope'], 'def': 'a flat (usually rectangular) container for a letter, thin package, etc.', 'name': 'envelope'}, {'frequency': 'c', 'id': 432, 'synset': 'eraser.n.01', 'synonyms': ['eraser'], 'def': 'an implement used to erase something', 'name': 'eraser'}, {'frequency': 'r', 'id': 433, 'synset': 'escargot.n.01', 'synonyms': ['escargot'], 'def': 'edible snail usually served in the shell with a sauce of melted butter and garlic', 'name': 'escargot'}, {'frequency': 'r', 'id': 434, 'synset': 'eyepatch.n.01', 'synonyms': ['eyepatch'], 'def': 'a protective cloth covering for an injured eye', 'name': 'eyepatch'}, {'frequency': 'r', 'id': 435, 'synset': 'falcon.n.01', 'synonyms': ['falcon'], 'def': 'birds of prey having long pointed powerful wings adapted for swift flight', 'name': 'falcon'}, {'frequency': 'f', 'id': 436, 'synset': 'fan.n.01', 'synonyms': ['fan'], 'def': 'a device for creating a current of air by movement of a surface or surfaces', 'name': 'fan'}, {'frequency': 'f', 'id': 437, 'synset': 'faucet.n.01', 'synonyms': ['faucet', 'spigot', 'tap'], 'def': 'a regulator for controlling the flow of a liquid from a reservoir', 'name': 'faucet'}, {'frequency': 'r', 'id': 438, 'synset': 'fedora.n.01', 'synonyms': ['fedora'], 'def': 'a hat made of felt with a creased crown', 'name': 'fedora'}, {'frequency': 'r', 'id': 439, 'synset': 'ferret.n.02', 'synonyms': ['ferret'], 'def': 'domesticated albino variety of the European polecat bred for hunting rats and rabbits', 'name': 'ferret'}, {'frequency': 'c', 'id': 440, 'synset': 'ferris_wheel.n.01', 'synonyms': ['Ferris_wheel'], 'def': 'a large wheel with suspended seats that remain upright as the wheel rotates', 'name': 'Ferris_wheel'}, {'frequency': 'r', 'id': 441, 'synset': 'ferry.n.01', 'synonyms': ['ferry', 'ferryboat'], 'def': 'a boat that transports people or vehicles across a body of water and operates on a regular schedule', 'name': 'ferry'}, {'frequency': 'r', 'id': 442, 'synset': 'fig.n.04', 'synonyms': ['fig_(fruit)'], 'def': 'fleshy sweet pear-shaped yellowish or purple fruit eaten fresh or preserved or dried', 'name': 'fig_(fruit)'}, {'frequency': 'c', 'id': 443, 'synset': 'fighter.n.02', 'synonyms': ['fighter_jet', 'fighter_aircraft', 'attack_aircraft'], 'def': 'a high-speed military or naval airplane designed to destroy enemy targets', 'name': 'fighter_jet'}, {'frequency': 'f', 'id': 444, 'synset': 'figurine.n.01', 'synonyms': ['figurine'], 'def': 'a small carved or molded figure', 'name': 'figurine'}, {'frequency': 'c', 'id': 445, 'synset': 'file.n.03', 'synonyms': ['file_cabinet', 'filing_cabinet'], 'def': 'office furniture consisting of a container for keeping papers in order', 'name': 'file_cabinet'}, {'frequency': 'r', 'id': 446, 'synset': 'file.n.04', 'synonyms': ['file_(tool)'], 'def': 'a steel hand tool with small sharp teeth on some or all of its surfaces; used for smoothing wood or metal', 'name': 'file_(tool)'}, {'frequency': 'f', 'id': 447, 'synset': 'fire_alarm.n.02', 'synonyms': ['fire_alarm', 'smoke_alarm'], 'def': 'an alarm that is tripped off by fire or smoke', 'name': 'fire_alarm'}, {'frequency': 'c', 'id': 448, 'synset': 'fire_engine.n.01', 'synonyms': ['fire_engine', 'fire_truck'], 'def': 'large trucks that carry firefighters and equipment to the site of a fire', 'name': 'fire_engine'}, {'frequency': 'c', 'id': 449, 'synset': 'fire_extinguisher.n.01', 'synonyms': ['fire_extinguisher', 'extinguisher'], 'def': 'a manually operated device for extinguishing small fires', 'name': 'fire_extinguisher'}, {'frequency': 
'c', 'id': 450, 'synset': 'fire_hose.n.01', 'synonyms': ['fire_hose'], 'def': 'a large hose that carries water from a fire hydrant to the site of the fire', 'name': 'fire_hose'}, {'frequency': 'f', 'id': 451, 'synset': 'fireplace.n.01', 'synonyms': ['fireplace'], 'def': 'an open recess in a wall at the base of a chimney where a fire can be built', 'name': 'fireplace'}, {'frequency': 'f', 'id': 452, 'synset': 'fireplug.n.01', 'synonyms': ['fireplug', 'fire_hydrant', 'hydrant'], 'def': 'an upright hydrant for drawing water to use in fighting a fire', 'name': 'fireplug'}, {'frequency': 'c', 'id': 453, 'synset': 'fish.n.01', 'synonyms': ['fish'], 'def': 'any of various mostly cold-blooded aquatic vertebrates usually having scales and breathing through gills', 'name': 'fish'}, {'frequency': 'r', 'id': 454, 'synset': 'fish.n.02', 'synonyms': ['fish_(food)'], 'def': 'the flesh of fish used as food', 'name': 'fish_(food)'}, {'frequency': 'r', 'id': 455, 'synset': 'fishbowl.n.02', 'synonyms': ['fishbowl', 'goldfish_bowl'], 'def': 'a transparent bowl in which small fish are kept', 'name': 'fishbowl'}, {'frequency': 'r', 'id': 456, 'synset': 'fishing_boat.n.01', 'synonyms': ['fishing_boat', 'fishing_vessel'], 'def': 'a vessel for fishing', 'name': 'fishing_boat'}, {'frequency': 'c', 'id': 457, 'synset': 'fishing_rod.n.01', 'synonyms': ['fishing_rod', 'fishing_pole'], 'def': 'a rod that is used in fishing to extend the fishing line', 'name': 'fishing_rod'}, {'frequency': 'f', 'id': 458, 'synset': 'flag.n.01', 'synonyms': ['flag'], 'def': 'emblem usually consisting of a rectangular piece of cloth of distinctive design (do not include pole)', 'name': 'flag'}, {'frequency': 'f', 'id': 459, 'synset': 'flagpole.n.02', 'synonyms': ['flagpole', 'flagstaff'], 'def': 'a tall staff or pole on which a flag is raised', 'name': 'flagpole'}, {'frequency': 'c', 'id': 460, 'synset': 'flamingo.n.01', 'synonyms': ['flamingo'], 'def': 'large pink web-footed bird with down-bent bill', 'name': 'flamingo'}, {'frequency': 'c', 'id': 461, 'synset': 'flannel.n.01', 'synonyms': ['flannel'], 'def': 'a soft light woolen fabric; used for clothing', 'name': 'flannel'}, {'frequency': 'r', 'id': 462, 'synset': 'flash.n.10', 'synonyms': ['flash', 'flashbulb'], 'def': 'a lamp for providing momentary light to take a photograph', 'name': 'flash'}, {'frequency': 'c', 'id': 463, 'synset': 'flashlight.n.01', 'synonyms': ['flashlight', 'torch'], 'def': 'a small portable battery-powered electric lamp', 'name': 'flashlight'}, {'frequency': 'r', 'id': 464, 'synset': 'fleece.n.03', 'synonyms': ['fleece'], 'def': 'a soft bulky fabric with deep pile; used chiefly for clothing', 'name': 'fleece'}, {'frequency': 'f', 'id': 465, 'synset': 'flip-flop.n.02', 'synonyms': ['flip-flop_(sandal)'], 'def': 'a backless sandal held to the foot by a thong between two toes', 'name': 'flip-flop_(sandal)'}, {'frequency': 'c', 'id': 466, 'synset': 'flipper.n.01', 'synonyms': ['flipper_(footwear)', 'fin_(footwear)'], 'def': 'a shoe to aid a person in swimming', 'name': 'flipper_(footwear)'}, {'frequency': 'f', 'id': 467, 'synset': 'flower_arrangement.n.01', 'synonyms': ['flower_arrangement', 'floral_arrangement'], 'def': 'a decorative arrangement of flowers', 'name': 'flower_arrangement'}, {'frequency': 'c', 'id': 468, 'synset': 'flute.n.02', 'synonyms': ['flute_glass', 'champagne_flute'], 'def': 'a tall narrow wineglass', 'name': 'flute_glass'}, {'frequency': 'r', 'id': 469, 'synset': 'foal.n.01', 'synonyms': ['foal'], 'def': 'a young horse', 'name': 'foal'}, 
{'frequency': 'c', 'id': 470, 'synset': 'folding_chair.n.01', 'synonyms': ['folding_chair'], 'def': 'a chair that can be folded flat for storage', 'name': 'folding_chair'}, {'frequency': 'c', 'id': 471, 'synset': 'food_processor.n.01', 'synonyms': ['food_processor'], 'def': 'a kitchen appliance for shredding, blending, chopping, or slicing food', 'name': 'food_processor'}, {'frequency': 'c', 'id': 472, 'synset': 'football.n.02', 'synonyms': ['football_(American)'], 'def': 'the inflated oblong ball used in playing American football', 'name': 'football_(American)'}, {'frequency': 'r', 'id': 473, 'synset': 'football_helmet.n.01', 'synonyms': ['football_helmet'], 'def': 'a padded helmet with a face mask to protect the head of football players', 'name': 'football_helmet'}, {'frequency': 'c', 'id': 474, 'synset': 'footstool.n.01', 'synonyms': ['footstool', 'footrest'], 'def': 'a low seat or a stool to rest the feet of a seated person', 'name': 'footstool'}, {'frequency': 'f', 'id': 475, 'synset': 'fork.n.01', 'synonyms': ['fork'], 'def': 'cutlery used for serving and eating food', 'name': 'fork'}, {'frequency': 'r', 'id': 476, 'synset': 'forklift.n.01', 'synonyms': ['forklift'], 'def': 'an industrial vehicle with a power operated fork in front that can be inserted under loads to lift and move them', 'name': 'forklift'}, {'frequency': 'r', 'id': 477, 'synset': 'freight_car.n.01', 'synonyms': ['freight_car'], 'def': 'a railway car that carries freight', 'name': 'freight_car'}, {'frequency': 'r', 'id': 478, 'synset': 'french_toast.n.01', 'synonyms': ['French_toast'], 'def': 'bread slice dipped in egg and milk and fried', 'name': 'French_toast'}, {'frequency': 'c', 'id': 479, 'synset': 'freshener.n.01', 'synonyms': ['freshener', 'air_freshener'], 'def': 'anything that freshens', 'name': 'freshener'}, {'frequency': 'f', 'id': 480, 'synset': 'frisbee.n.01', 'synonyms': ['frisbee'], 'def': 'a light, plastic disk propelled with a flip of the wrist for recreation or competition', 'name': 'frisbee'}, {'frequency': 'c', 'id': 481, 'synset': 'frog.n.01', 'synonyms': ['frog', 'toad', 'toad_frog'], 'def': 'a tailless stout-bodied amphibians with long hind limbs for leaping', 'name': 'frog'}, {'frequency': 'c', 'id': 482, 'synset': 'fruit_juice.n.01', 'synonyms': ['fruit_juice'], 'def': 'drink produced by squeezing or crushing fruit', 'name': 'fruit_juice'}, {'frequency': 'r', 'id': 483, 'synset': 'fruit_salad.n.01', 'synonyms': ['fruit_salad'], 'def': 'salad composed of fruits', 'name': 'fruit_salad'}, {'frequency': 'c', 'id': 484, 'synset': 'frying_pan.n.01', 'synonyms': ['frying_pan', 'frypan', 'skillet'], 'def': 'a pan used for frying foods', 'name': 'frying_pan'}, {'frequency': 'r', 'id': 485, 'synset': 'fudge.n.01', 'synonyms': ['fudge'], 'def': 'soft creamy candy', 'name': 'fudge'}, {'frequency': 'r', 'id': 486, 'synset': 'funnel.n.02', 'synonyms': ['funnel'], 'def': 'a cone-shaped utensil used to channel a substance into a container with a small mouth', 'name': 'funnel'}, {'frequency': 'c', 'id': 487, 'synset': 'futon.n.01', 'synonyms': ['futon'], 'def': 'a pad that is used for sleeping on the floor or on a raised frame', 'name': 'futon'}, {'frequency': 'r', 'id': 488, 'synset': 'gag.n.02', 'synonyms': ['gag', 'muzzle'], 'def': "restraint put into a person's mouth to prevent speaking or shouting", 'name': 'gag'}, {'frequency': 'r', 'id': 489, 'synset': 'garbage.n.03', 'synonyms': ['garbage'], 'def': 'a receptacle where waste can be discarded', 'name': 'garbage'}, {'frequency': 'c', 'id': 490, 'synset': 
'garbage_truck.n.01', 'synonyms': ['garbage_truck'], 'def': 'a truck for collecting domestic refuse', 'name': 'garbage_truck'}, {'frequency': 'c', 'id': 491, 'synset': 'garden_hose.n.01', 'synonyms': ['garden_hose'], 'def': 'a hose used for watering a lawn or garden', 'name': 'garden_hose'}, {'frequency': 'c', 'id': 492, 'synset': 'gargle.n.01', 'synonyms': ['gargle', 'mouthwash'], 'def': 'a medicated solution used for gargling and rinsing the mouth', 'name': 'gargle'}, {'frequency': 'r', 'id': 493, 'synset': 'gargoyle.n.02', 'synonyms': ['gargoyle'], 'def': 'an ornament consisting of a grotesquely carved figure of a person or animal', 'name': 'gargoyle'}, {'frequency': 'c', 'id': 494, 'synset': 'garlic.n.02', 'synonyms': ['garlic', 'ail'], 'def': 'aromatic bulb used as seasoning', 'name': 'garlic'}, {'frequency': 'r', 'id': 495, 'synset': 'gasmask.n.01', 'synonyms': ['gasmask', 'respirator', 'gas_helmet'], 'def': 'a protective face mask with a filter', 'name': 'gasmask'}, {'frequency': 'r', 'id': 496, 'synset': 'gazelle.n.01', 'synonyms': ['gazelle'], 'def': 'small swift graceful antelope of Africa and Asia having lustrous eyes', 'name': 'gazelle'}, {'frequency': 'c', 'id': 497, 'synset': 'gelatin.n.02', 'synonyms': ['gelatin', 'jelly'], 'def': 'an edible jelly made with gelatin and used as a dessert or salad base or a coating for foods', 'name': 'gelatin'}, {'frequency': 'r', 'id': 498, 'synset': 'gem.n.02', 'synonyms': ['gemstone'], 'def': 'a crystalline rock that can be cut and polished for jewelry', 'name': 'gemstone'}, {'frequency': 'c', 'id': 499, 'synset': 'giant_panda.n.01', 'synonyms': ['giant_panda', 'panda', 'panda_bear'], 'def': 'large black-and-white herbivorous mammal of bamboo forests of China and Tibet', 'name': 'giant_panda'}, {'frequency': 'c', 'id': 500, 'synset': 'gift_wrap.n.01', 'synonyms': ['gift_wrap'], 'def': 'attractive wrapping paper suitable for wrapping gifts', 'name': 'gift_wrap'}, {'frequency': 'c', 'id': 501, 'synset': 'ginger.n.03', 'synonyms': ['ginger', 'gingerroot'], 'def': 'the root of the common ginger plant; used fresh as a seasoning', 'name': 'ginger'}, {'frequency': 'f', 'id': 502, 'synset': 'giraffe.n.01', 'synonyms': ['giraffe'], 'def': 'tall animal having a spotted coat and small horns and very long neck and legs', 'name': 'giraffe'}, {'frequency': 'c', 'id': 503, 'synset': 'girdle.n.02', 'synonyms': ['cincture', 'sash', 'waistband', 'waistcloth'], 'def': 'a band of material around the waist that strengthens a skirt or trousers', 'name': 'cincture'}, {'frequency': 'f', 'id': 504, 'synset': 'glass.n.02', 'synonyms': ['glass_(drink_container)', 'drinking_glass'], 'def': 'a container for holding liquids while drinking', 'name': 'glass_(drink_container)'}, {'frequency': 'c', 'id': 505, 'synset': 'globe.n.03', 'synonyms': ['globe'], 'def': 'a sphere on which a map (especially of the earth) is represented', 'name': 'globe'}, {'frequency': 'f', 'id': 506, 'synset': 'glove.n.02', 'synonyms': ['glove'], 'def': 'handwear covering the hand', 'name': 'glove'}, {'frequency': 'c', 'id': 507, 'synset': 'goat.n.01', 'synonyms': ['goat'], 'def': 'a common goat', 'name': 'goat'}, {'frequency': 'f', 'id': 508, 'synset': 'goggles.n.01', 'synonyms': ['goggles'], 'def': 'tight-fitting spectacles worn to protect the eyes', 'name': 'goggles'}, {'frequency': 'r', 'id': 509, 'synset': 'goldfish.n.01', 'synonyms': ['goldfish'], 'def': 'small golden or orange-red freshwater fishes used as pond or aquarium pets', 'name': 'goldfish'}, {'frequency': 'r', 'id': 510, 'synset': 
'golf_club.n.02', 'synonyms': ['golf_club', 'golf-club'], 'def': 'golf equipment used by a golfer to hit a golf ball', 'name': 'golf_club'}, {'frequency': 'c', 'id': 511, 'synset': 'golfcart.n.01', 'synonyms': ['golfcart'], 'def': 'a small motor vehicle in which golfers can ride between shots', 'name': 'golfcart'}, {'frequency': 'r', 'id': 512, 'synset': 'gondola.n.02', 'synonyms': ['gondola_(boat)'], 'def': 'long narrow flat-bottomed boat propelled by sculling; traditionally used on canals of Venice', 'name': 'gondola_(boat)'}, {'frequency': 'c', 'id': 513, 'synset': 'goose.n.01', 'synonyms': ['goose'], 'def': 'loud, web-footed long-necked aquatic birds usually larger than ducks', 'name': 'goose'}, {'frequency': 'r', 'id': 514, 'synset': 'gorilla.n.01', 'synonyms': ['gorilla'], 'def': 'largest ape', 'name': 'gorilla'}, {'frequency': 'r', 'id': 515, 'synset': 'gourd.n.02', 'synonyms': ['gourd'], 'def': 'any of numerous inedible fruits with hard rinds', 'name': 'gourd'}, {'frequency': 'r', 'id': 516, 'synset': 'gown.n.04', 'synonyms': ['surgical_gown', 'scrubs_(surgical_clothing)'], 'def': 'protective garment worn by surgeons during operations', 'name': 'surgical_gown'}, {'frequency': 'f', 'id': 517, 'synset': 'grape.n.01', 'synonyms': ['grape'], 'def': 'any of various juicy fruit with green or purple skins; grow in clusters', 'name': 'grape'}, {'frequency': 'r', 'id': 518, 'synset': 'grasshopper.n.01', 'synonyms': ['grasshopper'], 'def': 'plant-eating insect with hind legs adapted for leaping', 'name': 'grasshopper'}, {'frequency': 'c', 'id': 519, 'synset': 'grater.n.01', 'synonyms': ['grater'], 'def': 'utensil with sharp perforations for shredding foods (as vegetables or cheese)', 'name': 'grater'}, {'frequency': 'c', 'id': 520, 'synset': 'gravestone.n.01', 'synonyms': ['gravestone', 'headstone', 'tombstone'], 'def': 'a stone that is used to mark a grave', 'name': 'gravestone'}, {'frequency': 'r', 'id': 521, 'synset': 'gravy_boat.n.01', 'synonyms': ['gravy_boat', 'gravy_holder'], 'def': 'a dish (often boat-shaped) for serving gravy or sauce', 'name': 'gravy_boat'}, {'frequency': 'c', 'id': 522, 'synset': 'green_bean.n.02', 'synonyms': ['green_bean'], 'def': 'a common bean plant cultivated for its slender green edible pods', 'name': 'green_bean'}, {'frequency': 'c', 'id': 523, 'synset': 'green_onion.n.01', 'synonyms': ['green_onion', 'spring_onion', 'scallion'], 'def': 'a young onion before the bulb has enlarged', 'name': 'green_onion'}, {'frequency': 'r', 'id': 524, 'synset': 'griddle.n.01', 'synonyms': ['griddle'], 'def': 'cooking utensil consisting of a flat heated surface on which food is cooked', 'name': 'griddle'}, {'frequency': 'r', 'id': 525, 'synset': 'grillroom.n.01', 'synonyms': ['grillroom', 'grill_(restaurant)'], 'def': 'a restaurant where food is cooked on a grill', 'name': 'grillroom'}, {'frequency': 'r', 'id': 526, 'synset': 'grinder.n.04', 'synonyms': ['grinder_(tool)'], 'def': 'a machine tool that polishes metal', 'name': 'grinder_(tool)'}, {'frequency': 'r', 'id': 527, 'synset': 'grits.n.01', 'synonyms': ['grits', 'hominy_grits'], 'def': 'coarsely ground corn boiled as a breakfast dish', 'name': 'grits'}, {'frequency': 'c', 'id': 528, 'synset': 'grizzly.n.01', 'synonyms': ['grizzly', 'grizzly_bear'], 'def': 'powerful brownish-yellow bear of the uplands of western North America', 'name': 'grizzly'}, {'frequency': 'c', 'id': 529, 'synset': 'grocery_bag.n.01', 'synonyms': ['grocery_bag'], 'def': "a sack for holding customer's groceries", 'name': 'grocery_bag'}, 
{'frequency': 'r', 'id': 530, 'synset': 'guacamole.n.01', 'synonyms': ['guacamole'], 'def': 'a dip made of mashed avocado mixed with chopped onions and other seasonings', 'name': 'guacamole'}, {'frequency': 'f', 'id': 531, 'synset': 'guitar.n.01', 'synonyms': ['guitar'], 'def': 'a stringed instrument usually having six strings; played by strumming or plucking', 'name': 'guitar'}, {'frequency': 'c', 'id': 532, 'synset': 'gull.n.02', 'synonyms': ['gull', 'seagull'], 'def': 'mostly white aquatic bird having long pointed wings and short legs', 'name': 'gull'}, {'frequency': 'c', 'id': 533, 'synset': 'gun.n.01', 'synonyms': ['gun'], 'def': 'a weapon that discharges a bullet at high velocity from a metal tube', 'name': 'gun'}, {'frequency': 'r', 'id': 534, 'synset': 'hair_spray.n.01', 'synonyms': ['hair_spray'], 'def': 'substance sprayed on the hair to hold it in place', 'name': 'hair_spray'}, {'frequency': 'c', 'id': 535, 'synset': 'hairbrush.n.01', 'synonyms': ['hairbrush'], 'def': "a brush used to groom a person's hair", 'name': 'hairbrush'}, {'frequency': 'c', 'id': 536, 'synset': 'hairnet.n.01', 'synonyms': ['hairnet'], 'def': 'a small net that someone wears over their hair to keep it in place', 'name': 'hairnet'}, {'frequency': 'c', 'id': 537, 'synset': 'hairpin.n.01', 'synonyms': ['hairpin'], 'def': "a double pronged pin used to hold women's hair in place", 'name': 'hairpin'}, {'frequency': 'f', 'id': 538, 'synset': 'ham.n.01', 'synonyms': ['ham', 'jambon', 'gammon'], 'def': 'meat cut from the thigh of a hog (usually smoked)', 'name': 'ham'}, {'frequency': 'c', 'id': 539, 'synset': 'hamburger.n.01', 'synonyms': ['hamburger', 'beefburger', 'burger'], 'def': 'a sandwich consisting of a patty of minced beef served on a bun', 'name': 'hamburger'}, {'frequency': 'c', 'id': 540, 'synset': 'hammer.n.02', 'synonyms': ['hammer'], 'def': 'a hand tool with a heavy head and a handle; used to deliver an impulsive force by striking', 'name': 'hammer'}, {'frequency': 'r', 'id': 541, 'synset': 'hammock.n.02', 'synonyms': ['hammock'], 'def': 'a hanging bed of canvas or rope netting (usually suspended between two trees)', 'name': 'hammock'}, {'frequency': 'r', 'id': 542, 'synset': 'hamper.n.02', 'synonyms': ['hamper'], 'def': 'a basket usually with a cover', 'name': 'hamper'}, {'frequency': 'r', 'id': 543, 'synset': 'hamster.n.01', 'synonyms': ['hamster'], 'def': 'short-tailed burrowing rodent with large cheek pouches', 'name': 'hamster'}, {'frequency': 'c', 'id': 544, 'synset': 'hand_blower.n.01', 'synonyms': ['hair_dryer'], 'def': 'a hand-held electric blower that can blow warm air onto the hair', 'name': 'hair_dryer'}, {'frequency': 'r', 'id': 545, 'synset': 'hand_glass.n.01', 'synonyms': ['hand_glass', 'hand_mirror'], 'def': 'a mirror intended to be held in the hand', 'name': 'hand_glass'}, {'frequency': 'f', 'id': 546, 'synset': 'hand_towel.n.01', 'synonyms': ['hand_towel', 'face_towel'], 'def': 'a small towel used to dry the hands or face', 'name': 'hand_towel'}, {'frequency': 'c', 'id': 547, 'synset': 'handcart.n.01', 'synonyms': ['handcart', 'pushcart', 'hand_truck'], 'def': 'wheeled vehicle that can be pushed by a person', 'name': 'handcart'}, {'frequency': 'r', 'id': 548, 'synset': 'handcuff.n.01', 'synonyms': ['handcuff'], 'def': 'shackle that consists of a metal loop that can be locked around the wrist', 'name': 'handcuff'}, {'frequency': 'c', 'id': 549, 'synset': 'handkerchief.n.01', 'synonyms': ['handkerchief'], 'def': 'a square piece of cloth used for wiping the eyes or nose or as a costume 
accessory', 'name': 'handkerchief'}, {'frequency': 'f', 'id': 550, 'synset': 'handle.n.01', 'synonyms': ['handle', 'grip', 'handgrip'], 'def': 'the appendage to an object that is designed to be held in order to use or move it', 'name': 'handle'}, {'frequency': 'r', 'id': 551, 'synset': 'handsaw.n.01', 'synonyms': ['handsaw', "carpenter's_saw"], 'def': 'a saw used with one hand for cutting wood', 'name': 'handsaw'}, {'frequency': 'r', 'id': 552, 'synset': 'hardback.n.01', 'synonyms': ['hardback_book', 'hardcover_book'], 'def': 'a book with cardboard or cloth or leather covers', 'name': 'hardback_book'}, {'frequency': 'r', 'id': 553, 'synset': 'harmonium.n.01', 'synonyms': ['harmonium', 'organ_(musical_instrument)', 'reed_organ_(musical_instrument)'], 'def': 'a free-reed instrument in which air is forced through the reeds by bellows', 'name': 'harmonium'}, {'frequency': 'f', 'id': 554, 'synset': 'hat.n.01', 'synonyms': ['hat'], 'def': 'headwear that protects the head from bad weather, sun, or worn for fashion', 'name': 'hat'}, {'frequency': 'r', 'id': 555, 'synset': 'hatbox.n.01', 'synonyms': ['hatbox'], 'def': 'a round piece of luggage for carrying hats', 'name': 'hatbox'}, {'frequency': 'r', 'id': 556, 'synset': 'hatch.n.03', 'synonyms': ['hatch'], 'def': 'a movable barrier covering a hatchway', 'name': 'hatch'}, {'frequency': 'c', 'id': 557, 'synset': 'head_covering.n.01', 'synonyms': ['veil'], 'def': 'a garment that covers the head and face', 'name': 'veil'}, {'frequency': 'f', 'id': 558, 'synset': 'headband.n.01', 'synonyms': ['headband'], 'def': 'a band worn around or over the head', 'name': 'headband'}, {'frequency': 'f', 'id': 559, 'synset': 'headboard.n.01', 'synonyms': ['headboard'], 'def': 'a vertical board or panel forming the head of a bedstead', 'name': 'headboard'}, {'frequency': 'f', 'id': 560, 'synset': 'headlight.n.01', 'synonyms': ['headlight', 'headlamp'], 'def': 'a powerful light with reflector; attached to the front of an automobile or locomotive', 'name': 'headlight'}, {'frequency': 'c', 'id': 561, 'synset': 'headscarf.n.01', 'synonyms': ['headscarf'], 'def': 'a kerchief worn over the head and tied under the chin', 'name': 'headscarf'}, {'frequency': 'r', 'id': 562, 'synset': 'headset.n.01', 'synonyms': ['headset'], 'def': 'receiver consisting of a pair of headphones', 'name': 'headset'}, {'frequency': 'c', 'id': 563, 'synset': 'headstall.n.01', 'synonyms': ['headstall_(for_horses)', 'headpiece_(for_horses)'], 'def': "the band that is the part of a bridle that fits around a horse's head", 'name': 'headstall_(for_horses)'}, {'frequency': 'r', 'id': 564, 'synset': 'hearing_aid.n.02', 'synonyms': ['hearing_aid'], 'def': 'an acoustic device used to direct sound to the ear of a hearing-impaired person', 'name': 'hearing_aid'}, {'frequency': 'c', 'id': 565, 'synset': 'heart.n.02', 'synonyms': ['heart'], 'def': 'a muscular organ; its contractions move the blood through the body', 'name': 'heart'}, {'frequency': 'c', 'id': 566, 'synset': 'heater.n.01', 'synonyms': ['heater', 'warmer'], 'def': 'device that heats water or supplies warmth to a room', 'name': 'heater'}, {'frequency': 'c', 'id': 567, 'synset': 'helicopter.n.01', 'synonyms': ['helicopter'], 'def': 'an aircraft without wings that obtains its lift from the rotation of overhead blades', 'name': 'helicopter'}, {'frequency': 'f', 'id': 568, 'synset': 'helmet.n.02', 'synonyms': ['helmet'], 'def': 'a protective headgear made of hard material to resist blows', 'name': 'helmet'}, {'frequency': 'r', 'id': 569, 'synset': 
'heron.n.02', 'synonyms': ['heron'], 'def': 'grey or white wading bird with long neck and long legs and (usually) long bill', 'name': 'heron'}, {'frequency': 'c', 'id': 570, 'synset': 'highchair.n.01', 'synonyms': ['highchair', 'feeding_chair'], 'def': 'a chair for feeding a very young child', 'name': 'highchair'}, {'frequency': 'f', 'id': 571, 'synset': 'hinge.n.01', 'synonyms': ['hinge'], 'def': 'a joint that holds two parts together so that one can swing relative to the other', 'name': 'hinge'}, {'frequency': 'r', 'id': 572, 'synset': 'hippopotamus.n.01', 'synonyms': ['hippopotamus'], 'def': 'massive thick-skinned animal living in or around rivers of tropical Africa', 'name': 'hippopotamus'}, {'frequency': 'r', 'id': 573, 'synset': 'hockey_stick.n.01', 'synonyms': ['hockey_stick'], 'def': 'sports implement consisting of a stick used by hockey players to move the puck', 'name': 'hockey_stick'}, {'frequency': 'c', 'id': 574, 'synset': 'hog.n.03', 'synonyms': ['hog', 'pig'], 'def': 'domestic swine', 'name': 'hog'}, {'frequency': 'f', 'id': 575, 'synset': 'home_plate.n.01', 'synonyms': ['home_plate_(baseball)', 'home_base_(baseball)'], 'def': '(baseball) a rubber slab where the batter stands; it must be touched by a base runner in order to score', 'name': 'home_plate_(baseball)'}, {'frequency': 'c', 'id': 576, 'synset': 'honey.n.01', 'synonyms': ['honey'], 'def': 'a sweet yellow liquid produced by bees', 'name': 'honey'}, {'frequency': 'f', 'id': 577, 'synset': 'hood.n.06', 'synonyms': ['fume_hood', 'exhaust_hood'], 'def': 'metal covering leading to a vent that exhausts smoke or fumes', 'name': 'fume_hood'}, {'frequency': 'f', 'id': 578, 'synset': 'hook.n.05', 'synonyms': ['hook'], 'def': 'a curved or bent implement for suspending or pulling something', 'name': 'hook'}, {'frequency': 'f', 'id': 579, 'synset': 'horse.n.01', 'synonyms': ['horse'], 'def': 'a common horse', 'name': 'horse'}, {'frequency': 'f', 'id': 580, 'synset': 'hose.n.03', 'synonyms': ['hose', 'hosepipe'], 'def': 'a flexible pipe for conveying a liquid or gas', 'name': 'hose'}, {'frequency': 'r', 'id': 581, 'synset': 'hot-air_balloon.n.01', 'synonyms': ['hot-air_balloon'], 'def': 'balloon for travel through the air in a basket suspended below a large bag of heated air', 'name': 'hot-air_balloon'}, {'frequency': 'r', 'id': 582, 'synset': 'hot_plate.n.01', 'synonyms': ['hotplate'], 'def': 'a portable electric appliance for heating or cooking or keeping food warm', 'name': 'hotplate'}, {'frequency': 'c', 'id': 583, 'synset': 'hot_sauce.n.01', 'synonyms': ['hot_sauce'], 'def': 'a pungent peppery sauce', 'name': 'hot_sauce'}, {'frequency': 'r', 'id': 584, 'synset': 'hourglass.n.01', 'synonyms': ['hourglass'], 'def': 'a sandglass timer that runs for sixty minutes', 'name': 'hourglass'}, {'frequency': 'r', 'id': 585, 'synset': 'houseboat.n.01', 'synonyms': ['houseboat'], 'def': 'a barge that is designed and equipped for use as a dwelling', 'name': 'houseboat'}, {'frequency': 'r', 'id': 586, 'synset': 'hummingbird.n.01', 'synonyms': ['hummingbird'], 'def': 'tiny American bird having brilliant iridescent plumage and long slender bills', 'name': 'hummingbird'}, {'frequency': 'r', 'id': 587, 'synset': 'hummus.n.01', 'synonyms': ['hummus', 'humus', 'hommos', 'hoummos', 'humous'], 'def': 'a thick spread made from mashed chickpeas', 'name': 'hummus'}, {'frequency': 'c', 'id': 588, 'synset': 'ice_bear.n.01', 'synonyms': ['polar_bear'], 'def': 'white bear of Arctic regions', 'name': 'polar_bear'}, {'frequency': 'c', 'id': 589, 'synset': 
'ice_cream.n.01', 'synonyms': ['icecream'], 'def': 'frozen dessert containing cream and sugar and flavoring', 'name': 'icecream'}, {'frequency': 'r', 'id': 590, 'synset': 'ice_lolly.n.01', 'synonyms': ['popsicle'], 'def': 'ice cream or water ice on a small wooden stick', 'name': 'popsicle'}, {'frequency': 'c', 'id': 591, 'synset': 'ice_maker.n.01', 'synonyms': ['ice_maker'], 'def': 'an appliance included in some electric refrigerators for making ice cubes', 'name': 'ice_maker'}, {'frequency': 'r', 'id': 592, 'synset': 'ice_pack.n.01', 'synonyms': ['ice_pack', 'ice_bag'], 'def': 'a waterproof bag filled with ice: applied to the body (especially the head) to cool or reduce swelling', 'name': 'ice_pack'}, {'frequency': 'r', 'id': 593, 'synset': 'ice_skate.n.01', 'synonyms': ['ice_skate'], 'def': 'skate consisting of a boot with a steel blade fitted to the sole', 'name': 'ice_skate'}, {'frequency': 'r', 'id': 594, 'synset': 'ice_tea.n.01', 'synonyms': ['ice_tea', 'iced_tea'], 'def': 'strong tea served over ice', 'name': 'ice_tea'}, {'frequency': 'c', 'id': 595, 'synset': 'igniter.n.01', 'synonyms': ['igniter', 'ignitor', 'lighter'], 'def': 'a substance or device used to start a fire', 'name': 'igniter'}, {'frequency': 'r', 'id': 596, 'synset': 'incense.n.01', 'synonyms': ['incense'], 'def': 'a substance that produces a fragrant odor when burned', 'name': 'incense'}, {'frequency': 'r', 'id': 597, 'synset': 'inhaler.n.01', 'synonyms': ['inhaler', 'inhalator'], 'def': 'a dispenser that produces a chemical vapor to be inhaled through mouth or nose', 'name': 'inhaler'}, {'frequency': 'c', 'id': 598, 'synset': 'ipod.n.01', 'synonyms': ['iPod'], 'def': 'a pocket-sized device used to play music files', 'name': 'iPod'}, {'frequency': 'c', 'id': 599, 'synset': 'iron.n.04', 'synonyms': ['iron_(for_clothing)', 'smoothing_iron_(for_clothing)'], 'def': 'home appliance consisting of a flat metal base that is heated and used to smooth cloth', 'name': 'iron_(for_clothing)'}, {'frequency': 'r', 'id': 600, 'synset': 'ironing_board.n.01', 'synonyms': ['ironing_board'], 'def': 'narrow padded board on collapsible supports; used for ironing clothes', 'name': 'ironing_board'}, {'frequency': 'f', 'id': 601, 'synset': 'jacket.n.01', 'synonyms': ['jacket'], 'def': 'a waist-length coat', 'name': 'jacket'}, {'frequency': 'r', 'id': 602, 'synset': 'jam.n.01', 'synonyms': ['jam'], 'def': 'preserve of crushed fruit', 'name': 'jam'}, {'frequency': 'f', 'id': 603, 'synset': 'jean.n.01', 'synonyms': ['jean', 'blue_jean', 'denim'], 'def': '(usually plural) close-fitting trousers of heavy denim for manual work or casual wear', 'name': 'jean'}, {'frequency': 'c', 'id': 604, 'synset': 'jeep.n.01', 'synonyms': ['jeep', 'landrover'], 'def': 'a car suitable for traveling over rough terrain', 'name': 'jeep'}, {'frequency': 'r', 'id': 605, 'synset': 'jelly_bean.n.01', 'synonyms': ['jelly_bean', 'jelly_egg'], 'def': 'sugar-glazed jellied candy', 'name': 'jelly_bean'}, {'frequency': 'f', 'id': 606, 'synset': 'jersey.n.03', 'synonyms': ['jersey', 'T-shirt', 'tee_shirt'], 'def': 'a close-fitting pullover shirt', 'name': 'jersey'}, {'frequency': 'c', 'id': 607, 'synset': 'jet.n.01', 'synonyms': ['jet_plane', 'jet-propelled_plane'], 'def': 'an airplane powered by one or more jet engines', 'name': 'jet_plane'}, {'frequency': 'c', 'id': 608, 'synset': 'jewelry.n.01', 'synonyms': ['jewelry', 'jewellery'], 'def': 'an adornment (as a bracelet or ring or necklace) made of precious metals and set with gems (or imitation gems)', 'name': 'jewelry'}, 
{'frequency': 'r', 'id': 609, 'synset': 'joystick.n.02', 'synonyms': ['joystick'], 'def': 'a control device for computers consisting of a vertical handle that can move freely in two directions', 'name': 'joystick'}, {'frequency': 'r', 'id': 610, 'synset': 'jump_suit.n.01', 'synonyms': ['jumpsuit'], 'def': "one-piece garment fashioned after a parachutist's uniform", 'name': 'jumpsuit'}, {'frequency': 'c', 'id': 611, 'synset': 'kayak.n.01', 'synonyms': ['kayak'], 'def': 'a small canoe consisting of a light frame made watertight with animal skins', 'name': 'kayak'}, {'frequency': 'r', 'id': 612, 'synset': 'keg.n.02', 'synonyms': ['keg'], 'def': 'small cask or barrel', 'name': 'keg'}, {'frequency': 'r', 'id': 613, 'synset': 'kennel.n.01', 'synonyms': ['kennel', 'doghouse'], 'def': 'outbuilding that serves as a shelter for a dog', 'name': 'kennel'}, {'frequency': 'c', 'id': 614, 'synset': 'kettle.n.01', 'synonyms': ['kettle', 'boiler'], 'def': 'a metal pot for stewing or boiling; usually has a lid', 'name': 'kettle'}, {'frequency': 'f', 'id': 615, 'synset': 'key.n.01', 'synonyms': ['key'], 'def': 'metal instrument used to unlock a lock', 'name': 'key'}, {'frequency': 'r', 'id': 616, 'synset': 'keycard.n.01', 'synonyms': ['keycard'], 'def': 'a plastic card used to gain access typically to a door', 'name': 'keycard'}, {'frequency': 'r', 'id': 617, 'synset': 'kilt.n.01', 'synonyms': ['kilt'], 'def': 'a knee-length pleated tartan skirt worn by men as part of the traditional dress in the Highlands of northern Scotland', 'name': 'kilt'}, {'frequency': 'c', 'id': 618, 'synset': 'kimono.n.01', 'synonyms': ['kimono'], 'def': 'a loose robe; imitated from robes originally worn by Japanese', 'name': 'kimono'}, {'frequency': 'f', 'id': 619, 'synset': 'kitchen_sink.n.01', 'synonyms': ['kitchen_sink'], 'def': 'a sink in a kitchen', 'name': 'kitchen_sink'}, {'frequency': 'c', 'id': 620, 'synset': 'kitchen_table.n.01', 'synonyms': ['kitchen_table'], 'def': 'a table in the kitchen', 'name': 'kitchen_table'}, {'frequency': 'f', 'id': 621, 'synset': 'kite.n.03', 'synonyms': ['kite'], 'def': 'plaything consisting of a light frame covered with tissue paper; flown in wind at end of a string', 'name': 'kite'}, {'frequency': 'c', 'id': 622, 'synset': 'kitten.n.01', 'synonyms': ['kitten', 'kitty'], 'def': 'young domestic cat', 'name': 'kitten'}, {'frequency': 'c', 'id': 623, 'synset': 'kiwi.n.03', 'synonyms': ['kiwi_fruit'], 'def': 'fuzzy brown egg-shaped fruit with slightly tart green flesh', 'name': 'kiwi_fruit'}, {'frequency': 'f', 'id': 624, 'synset': 'knee_pad.n.01', 'synonyms': ['knee_pad'], 'def': 'protective garment consisting of a pad worn by football or baseball or hockey players', 'name': 'knee_pad'}, {'frequency': 'f', 'id': 625, 'synset': 'knife.n.01', 'synonyms': ['knife'], 'def': 'tool with a blade and point used as a cutting instrument', 'name': 'knife'}, {'frequency': 'r', 'id': 626, 'synset': 'knight.n.02', 'synonyms': ['knight_(chess_piece)', 'horse_(chess_piece)'], 'def': 'a chess game piece shaped to resemble the head of a horse', 'name': 'knight_(chess_piece)'}, {'frequency': 'r', 'id': 627, 'synset': 'knitting_needle.n.01', 'synonyms': ['knitting_needle'], 'def': 'needle consisting of a slender rod with pointed ends; usually used in pairs', 'name': 'knitting_needle'}, {'frequency': 'f', 'id': 628, 'synset': 'knob.n.02', 'synonyms': ['knob'], 'def': 'a round handle often found on a door', 'name': 'knob'}, {'frequency': 'r', 'id': 629, 'synset': 'knocker.n.05', 'synonyms': ['knocker_(on_a_door)', 
'doorknocker'], 'def': 'a device (usually metal and ornamental) attached by a hinge to a door', 'name': 'knocker_(on_a_door)'}, {'frequency': 'r', 'id': 630, 'synset': 'koala.n.01', 'synonyms': ['koala', 'koala_bear'], 'def': 'sluggish tailless Australian marsupial with grey furry ears and coat', 'name': 'koala'}, {'frequency': 'r', 'id': 631, 'synset': 'lab_coat.n.01', 'synonyms': ['lab_coat', 'laboratory_coat'], 'def': 'a light coat worn to protect clothing from substances used while working in a laboratory', 'name': 'lab_coat'}, {'frequency': 'f', 'id': 632, 'synset': 'ladder.n.01', 'synonyms': ['ladder'], 'def': 'steps consisting of two parallel members connected by rungs', 'name': 'ladder'}, {'frequency': 'c', 'id': 633, 'synset': 'ladle.n.01', 'synonyms': ['ladle'], 'def': 'a spoon-shaped vessel with a long handle frequently used to transfer liquids', 'name': 'ladle'}, {'frequency': 'r', 'id': 634, 'synset': 'ladybug.n.01', 'synonyms': ['ladybug', 'ladybeetle', 'ladybird_beetle'], 'def': 'small round bright-colored and spotted beetle, typically red and black', 'name': 'ladybug'}, {'frequency': 'c', 'id': 635, 'synset': 'lamb.n.01', 'synonyms': ['lamb_(animal)'], 'def': 'young sheep', 'name': 'lamb_(animal)'}, {'frequency': 'r', 'id': 636, 'synset': 'lamb_chop.n.01', 'synonyms': ['lamb-chop', 'lambchop'], 'def': 'chop cut from a lamb', 'name': 'lamb-chop'}, {'frequency': 'f', 'id': 637, 'synset': 'lamp.n.02', 'synonyms': ['lamp'], 'def': 'a piece of furniture holding one or more electric light bulbs', 'name': 'lamp'}, {'frequency': 'f', 'id': 638, 'synset': 'lamppost.n.01', 'synonyms': ['lamppost'], 'def': 'a metal post supporting an outdoor lamp (such as a streetlight)', 'name': 'lamppost'}, {'frequency': 'f', 'id': 639, 'synset': 'lampshade.n.01', 'synonyms': ['lampshade'], 'def': 'a protective ornamental shade used to screen a light bulb from direct view', 'name': 'lampshade'}, {'frequency': 'c', 'id': 640, 'synset': 'lantern.n.01', 'synonyms': ['lantern'], 'def': 'light in a transparent protective case', 'name': 'lantern'}, {'frequency': 'f', 'id': 641, 'synset': 'lanyard.n.02', 'synonyms': ['lanyard', 'laniard'], 'def': 'a cord worn around the neck to hold a knife or whistle, etc.', 'name': 'lanyard'}, {'frequency': 'f', 'id': 642, 'synset': 'laptop.n.01', 'synonyms': ['laptop_computer', 'notebook_computer'], 'def': 'a portable computer small enough to use in your lap', 'name': 'laptop_computer'}, {'frequency': 'r', 'id': 643, 'synset': 'lasagna.n.01', 'synonyms': ['lasagna', 'lasagne'], 'def': 'baked dish of layers of lasagna pasta with sauce and cheese and meat or vegetables', 'name': 'lasagna'}, {'frequency': 'c', 'id': 644, 'synset': 'latch.n.02', 'synonyms': ['latch'], 'def': 'a bar that can be lowered or slid into a groove to fasten a door or gate', 'name': 'latch'}, {'frequency': 'r', 'id': 645, 'synset': 'lawn_mower.n.01', 'synonyms': ['lawn_mower'], 'def': 'garden tool for mowing grass on lawns', 'name': 'lawn_mower'}, {'frequency': 'r', 'id': 646, 'synset': 'leather.n.01', 'synonyms': ['leather'], 'def': 'an animal skin made smooth and flexible by removing the hair and then tanning', 'name': 'leather'}, {'frequency': 'c', 'id': 647, 'synset': 'legging.n.01', 'synonyms': ['legging_(clothing)', 'leging_(clothing)', 'leg_covering'], 'def': 'a garment covering the leg (usually extending from the knee to the ankle)', 'name': 'legging_(clothing)'}, {'frequency': 'c', 'id': 648, 'synset': 'lego.n.01', 'synonyms': ['Lego', 'Lego_set'], 'def': "a child's plastic construction set 
for making models from blocks", 'name': 'Lego'}, {'frequency': 'f', 'id': 649, 'synset': 'lemon.n.01', 'synonyms': ['lemon'], 'def': 'yellow oval fruit with juicy acidic flesh', 'name': 'lemon'}, {'frequency': 'r', 'id': 650, 'synset': 'lemonade.n.01', 'synonyms': ['lemonade'], 'def': 'sweetened beverage of diluted lemon juice', 'name': 'lemonade'}, {'frequency': 'f', 'id': 651, 'synset': 'lettuce.n.02', 'synonyms': ['lettuce'], 'def': 'leafy plant commonly eaten in salad or on sandwiches', 'name': 'lettuce'}, {'frequency': 'f', 'id': 652, 'synset': 'license_plate.n.01', 'synonyms': ['license_plate', 'numberplate'], 'def': "a plate mounted on the front and back of car and bearing the car's registration number", 'name': 'license_plate'}, {'frequency': 'f', 'id': 653, 'synset': 'life_buoy.n.01', 'synonyms': ['life_buoy', 'lifesaver', 'life_belt', 'life_ring'], 'def': 'a ring-shaped life preserver used to prevent drowning (NOT a life-jacket or vest)', 'name': 'life_buoy'}, {'frequency': 'f', 'id': 654, 'synset': 'life_jacket.n.01', 'synonyms': ['life_jacket', 'life_vest'], 'def': 'life preserver consisting of a sleeveless jacket of buoyant or inflatable design', 'name': 'life_jacket'}, {'frequency': 'f', 'id': 655, 'synset': 'light_bulb.n.01', 'synonyms': ['lightbulb'], 'def': 'glass bulb or tube shaped electric device that emits light (DO NOT MARK LAMPS AS A WHOLE)', 'name': 'lightbulb'}, {'frequency': 'r', 'id': 656, 'synset': 'lightning_rod.n.02', 'synonyms': ['lightning_rod', 'lightning_conductor'], 'def': 'a metallic conductor that is attached to a high point and leads to the ground', 'name': 'lightning_rod'}, {'frequency': 'c', 'id': 657, 'synset': 'lime.n.06', 'synonyms': ['lime'], 'def': 'the green acidic fruit of any of various lime trees', 'name': 'lime'}, {'frequency': 'r', 'id': 658, 'synset': 'limousine.n.01', 'synonyms': ['limousine'], 'def': 'long luxurious car; usually driven by a chauffeur', 'name': 'limousine'}, {'frequency': 'r', 'id': 659, 'synset': 'linen.n.02', 'synonyms': ['linen_paper'], 'def': 'a high-quality paper made of linen fibers or with a linen finish', 'name': 'linen_paper'}, {'frequency': 'c', 'id': 660, 'synset': 'lion.n.01', 'synonyms': ['lion'], 'def': 'large gregarious predatory cat of Africa and India', 'name': 'lion'}, {'frequency': 'c', 'id': 661, 'synset': 'lip_balm.n.01', 'synonyms': ['lip_balm'], 'def': 'a balm applied to the lips', 'name': 'lip_balm'}, {'frequency': 'c', 'id': 662, 'synset': 'lipstick.n.01', 'synonyms': ['lipstick', 'lip_rouge'], 'def': 'makeup that is used to color the lips', 'name': 'lipstick'}, {'frequency': 'r', 'id': 663, 'synset': 'liquor.n.01', 'synonyms': ['liquor', 'spirits', 'hard_liquor', 'liqueur', 'cordial'], 'def': 'an alcoholic beverage that is distilled rather than fermented', 'name': 'liquor'}, {'frequency': 'r', 'id': 664, 'synset': 'lizard.n.01', 'synonyms': ['lizard'], 'def': 'a reptile with usually two pairs of legs and a tapering tail', 'name': 'lizard'}, {'frequency': 'r', 'id': 665, 'synset': 'loafer.n.02', 'synonyms': ['Loafer_(type_of_shoe)'], 'def': 'a low leather step-in shoe', 'name': 'Loafer_(type_of_shoe)'}, {'frequency': 'f', 'id': 666, 'synset': 'log.n.01', 'synonyms': ['log'], 'def': 'a segment of the trunk of a tree when stripped of branches', 'name': 'log'}, {'frequency': 'c', 'id': 667, 'synset': 'lollipop.n.02', 'synonyms': ['lollipop'], 'def': 'hard candy on a stick', 'name': 'lollipop'}, {'frequency': 'c', 'id': 668, 'synset': 'lotion.n.01', 'synonyms': ['lotion'], 'def': 'any of various 
cosmetic preparations that are applied to the skin', 'name': 'lotion'}, {'frequency': 'f', 'id': 669, 'synset': 'loudspeaker.n.01', 'synonyms': ['speaker_(stero_equipment)'], 'def': 'electronic device that produces sound often as part of a stereo system', 'name': 'speaker_(stero_equipment)'}, {'frequency': 'c', 'id': 670, 'synset': 'love_seat.n.01', 'synonyms': ['loveseat'], 'def': 'small sofa that seats two people', 'name': 'loveseat'}, {'frequency': 'r', 'id': 671, 'synset': 'machine_gun.n.01', 'synonyms': ['machine_gun'], 'def': 'a rapidly firing automatic gun', 'name': 'machine_gun'}, {'frequency': 'f', 'id': 672, 'synset': 'magazine.n.02', 'synonyms': ['magazine'], 'def': 'a paperback periodic publication', 'name': 'magazine'}, {'frequency': 'f', 'id': 673, 'synset': 'magnet.n.01', 'synonyms': ['magnet'], 'def': 'a device that attracts iron and produces a magnetic field', 'name': 'magnet'}, {'frequency': 'r', 'id': 674, 'synset': 'mail_slot.n.01', 'synonyms': ['mail_slot'], 'def': 'a slot (usually in a door) through which mail can be delivered', 'name': 'mail_slot'}, {'frequency': 'c', 'id': 675, 'synset': 'mailbox.n.01', 'synonyms': ['mailbox_(at_home)', 'letter_box_(at_home)'], 'def': 'a private box for delivery of mail', 'name': 'mailbox_(at_home)'}, {'frequency': 'r', 'id': 676, 'synset': 'mallet.n.01', 'synonyms': ['mallet'], 'def': 'a sports implement with a long handle and a hammer-like head used to hit a ball', 'name': 'mallet'}, {'frequency': 'r', 'id': 677, 'synset': 'mammoth.n.01', 'synonyms': ['mammoth'], 'def': 'any of numerous extinct elephants widely distributed in the Pleistocene', 'name': 'mammoth'}, {'frequency': 'c', 'id': 678, 'synset': 'mandarin.n.05', 'synonyms': ['mandarin_orange'], 'def': 'a somewhat flat reddish-orange loose skinned citrus of China', 'name': 'mandarin_orange'}, {'frequency': 'c', 'id': 679, 'synset': 'manger.n.01', 'synonyms': ['manger', 'trough'], 'def': 'a container (usually in a barn or stable) from which cattle or horses feed', 'name': 'manger'}, {'frequency': 'f', 'id': 680, 'synset': 'manhole.n.01', 'synonyms': ['manhole'], 'def': 'a hole (usually with a flush cover) through which a person can gain access to an underground structure', 'name': 'manhole'}, {'frequency': 'c', 'id': 681, 'synset': 'map.n.01', 'synonyms': ['map'], 'def': "a diagrammatic representation of the earth's surface (or part of it)", 'name': 'map'}, {'frequency': 'c', 'id': 682, 'synset': 'marker.n.03', 'synonyms': ['marker'], 'def': 'a writing implement for making a mark', 'name': 'marker'}, {'frequency': 'r', 'id': 683, 'synset': 'martini.n.01', 'synonyms': ['martini'], 'def': 'a cocktail made of gin (or vodka) with dry vermouth', 'name': 'martini'}, {'frequency': 'r', 'id': 684, 'synset': 'mascot.n.01', 'synonyms': ['mascot'], 'def': 'a person or animal that is adopted by a team or other group as a symbolic figure', 'name': 'mascot'}, {'frequency': 'c', 'id': 685, 'synset': 'mashed_potato.n.01', 'synonyms': ['mashed_potato'], 'def': 'potato that has been peeled and boiled and then mashed', 'name': 'mashed_potato'}, {'frequency': 'r', 'id': 686, 'synset': 'masher.n.02', 'synonyms': ['masher'], 'def': 'a kitchen utensil used for mashing (e.g. 
potatoes)', 'name': 'masher'}, {'frequency': 'f', 'id': 687, 'synset': 'mask.n.04', 'synonyms': ['mask', 'facemask'], 'def': 'a protective covering worn over the face', 'name': 'mask'}, {'frequency': 'f', 'id': 688, 'synset': 'mast.n.01', 'synonyms': ['mast'], 'def': 'a vertical spar for supporting sails', 'name': 'mast'}, {'frequency': 'c', 'id': 689, 'synset': 'mat.n.03', 'synonyms': ['mat_(gym_equipment)', 'gym_mat'], 'def': 'sports equipment consisting of a piece of thick padding on the floor for gymnastics', 'name': 'mat_(gym_equipment)'}, {'frequency': 'r', 'id': 690, 'synset': 'matchbox.n.01', 'synonyms': ['matchbox'], 'def': 'a box for holding matches', 'name': 'matchbox'}, {'frequency': 'f', 'id': 691, 'synset': 'mattress.n.01', 'synonyms': ['mattress'], 'def': 'a thick pad filled with resilient material used as a bed or part of a bed', 'name': 'mattress'}, {'frequency': 'c', 'id': 692, 'synset': 'measuring_cup.n.01', 'synonyms': ['measuring_cup'], 'def': 'graduated cup used to measure liquid or granular ingredients', 'name': 'measuring_cup'}, {'frequency': 'c', 'id': 693, 'synset': 'measuring_stick.n.01', 'synonyms': ['measuring_stick', 'ruler_(measuring_stick)', 'measuring_rod'], 'def': 'measuring instrument having a sequence of marks at regular intervals', 'name': 'measuring_stick'}, {'frequency': 'c', 'id': 694, 'synset': 'meatball.n.01', 'synonyms': ['meatball'], 'def': 'ground meat formed into a ball and fried or simmered in broth', 'name': 'meatball'}, {'frequency': 'c', 'id': 695, 'synset': 'medicine.n.02', 'synonyms': ['medicine'], 'def': 'something that treats or prevents or alleviates the symptoms of disease', 'name': 'medicine'}, {'frequency': 'r', 'id': 696, 'synset': 'melon.n.01', 'synonyms': ['melon'], 'def': 'fruit of the gourd family having a hard rind and sweet juicy flesh', 'name': 'melon'}, {'frequency': 'f', 'id': 697, 'synset': 'microphone.n.01', 'synonyms': ['microphone'], 'def': 'device for converting sound waves into electrical energy', 'name': 'microphone'}, {'frequency': 'r', 'id': 698, 'synset': 'microscope.n.01', 'synonyms': ['microscope'], 'def': 'magnifier of the image of small objects', 'name': 'microscope'}, {'frequency': 'f', 'id': 699, 'synset': 'microwave.n.02', 'synonyms': ['microwave_oven'], 'def': 'kitchen appliance that cooks food by passing an electromagnetic wave through it', 'name': 'microwave_oven'}, {'frequency': 'r', 'id': 700, 'synset': 'milestone.n.01', 'synonyms': ['milestone', 'milepost'], 'def': 'stone post at side of a road to show distances', 'name': 'milestone'}, {'frequency': 'c', 'id': 701, 'synset': 'milk.n.01', 'synonyms': ['milk'], 'def': 'a white nutritious liquid secreted by mammals and used as food by human beings', 'name': 'milk'}, {'frequency': 'f', 'id': 702, 'synset': 'minivan.n.01', 'synonyms': ['minivan'], 'def': 'a small box-shaped passenger van', 'name': 'minivan'}, {'frequency': 'r', 'id': 703, 'synset': 'mint.n.05', 'synonyms': ['mint_candy'], 'def': 'a candy that is flavored with a mint oil', 'name': 'mint_candy'}, {'frequency': 'f', 'id': 704, 'synset': 'mirror.n.01', 'synonyms': ['mirror'], 'def': 'polished surface that forms images by reflecting light', 'name': 'mirror'}, {'frequency': 'c', 'id': 705, 'synset': 'mitten.n.01', 'synonyms': ['mitten'], 'def': 'glove that encases the thumb separately and the other four fingers together', 'name': 'mitten'}, {'frequency': 'c', 'id': 706, 'synset': 'mixer.n.04', 'synonyms': ['mixer_(kitchen_tool)', 'stand_mixer'], 'def': 'a kitchen utensil that is used for mixing 
foods', 'name': 'mixer_(kitchen_tool)'}, {'frequency': 'c', 'id': 707, 'synset': 'money.n.03', 'synonyms': ['money'], 'def': 'the official currency issued by a government or national bank', 'name': 'money'}, {'frequency': 'f', 'id': 708, 'synset': 'monitor.n.04', 'synonyms': ['monitor_(computer_equipment) computer_monitor'], 'def': 'a computer monitor', 'name': 'monitor_(computer_equipment) computer_monitor'}, {'frequency': 'c', 'id': 709, 'synset': 'monkey.n.01', 'synonyms': ['monkey'], 'def': 'any of various long-tailed primates', 'name': 'monkey'}, {'frequency': 'f', 'id': 710, 'synset': 'motor.n.01', 'synonyms': ['motor'], 'def': 'machine that converts other forms of energy into mechanical energy and so imparts motion', 'name': 'motor'}, {'frequency': 'f', 'id': 711, 'synset': 'motor_scooter.n.01', 'synonyms': ['motor_scooter', 'scooter'], 'def': 'a wheeled vehicle with small wheels and a low-powered engine', 'name': 'motor_scooter'}, {'frequency': 'r', 'id': 712, 'synset': 'motor_vehicle.n.01', 'synonyms': ['motor_vehicle', 'automotive_vehicle'], 'def': 'a self-propelled wheeled vehicle that does not run on rails', 'name': 'motor_vehicle'}, {'frequency': 'r', 'id': 713, 'synset': 'motorboat.n.01', 'synonyms': ['motorboat', 'powerboat'], 'def': 'a boat propelled by an internal-combustion engine', 'name': 'motorboat'}, {'frequency': 'f', 'id': 714, 'synset': 'motorcycle.n.01', 'synonyms': ['motorcycle'], 'def': 'a motor vehicle with two wheels and a strong frame', 'name': 'motorcycle'}, {'frequency': 'f', 'id': 715, 'synset': 'mound.n.01', 'synonyms': ['mound_(baseball)', "pitcher's_mound"], 'def': '(baseball) the slight elevation on which the pitcher stands', 'name': 'mound_(baseball)'}, {'frequency': 'r', 'id': 716, 'synset': 'mouse.n.01', 'synonyms': ['mouse_(animal_rodent)'], 'def': 'a small rodent with pointed snouts and small ears on elongated bodies with slender usually hairless tails', 'name': 'mouse_(animal_rodent)'}, {'frequency': 'f', 'id': 717, 'synset': 'mouse.n.04', 'synonyms': ['mouse_(computer_equipment)', 'computer_mouse'], 'def': 'a computer input device that controls an on-screen pointer', 'name': 'mouse_(computer_equipment)'}, {'frequency': 'f', 'id': 718, 'synset': 'mousepad.n.01', 'synonyms': ['mousepad'], 'def': 'a small portable pad that provides an operating surface for a computer mouse', 'name': 'mousepad'}, {'frequency': 'c', 'id': 719, 'synset': 'muffin.n.01', 'synonyms': ['muffin'], 'def': 'a sweet quick bread baked in a cup-shaped pan', 'name': 'muffin'}, {'frequency': 'f', 'id': 720, 'synset': 'mug.n.04', 'synonyms': ['mug'], 'def': 'with handle and usually cylindrical', 'name': 'mug'}, {'frequency': 'f', 'id': 721, 'synset': 'mushroom.n.02', 'synonyms': ['mushroom'], 'def': 'a common mushroom', 'name': 'mushroom'}, {'frequency': 'r', 'id': 722, 'synset': 'music_stool.n.01', 'synonyms': ['music_stool', 'piano_stool'], 'def': 'a stool for piano players; usually adjustable in height', 'name': 'music_stool'}, {'frequency': 'r', 'id': 723, 'synset': 'musical_instrument.n.01', 'synonyms': ['musical_instrument', 'instrument_(musical)'], 'def': 'any of various devices or contrivances that can be used to produce musical tones or sounds', 'name': 'musical_instrument'}, {'frequency': 'r', 'id': 724, 'synset': 'nailfile.n.01', 'synonyms': ['nailfile'], 'def': 'a small flat file for shaping the nails', 'name': 'nailfile'}, {'frequency': 'r', 'id': 725, 'synset': 'nameplate.n.01', 'synonyms': ['nameplate'], 'def': 'a plate bearing a name', 'name': 'nameplate'}, 
{'frequency': 'f', 'id': 726, 'synset': 'napkin.n.01', 'synonyms': ['napkin', 'table_napkin', 'serviette'], 'def': 'a small piece of table linen or paper that is used to wipe the mouth and to cover the lap in order to protect clothing', 'name': 'napkin'}, {'frequency': 'r', 'id': 727, 'synset': 'neckerchief.n.01', 'synonyms': ['neckerchief'], 'def': 'a kerchief worn around the neck', 'name': 'neckerchief'}, {'frequency': 'f', 'id': 728, 'synset': 'necklace.n.01', 'synonyms': ['necklace'], 'def': 'jewelry consisting of a cord or chain (often bearing gems) worn about the neck as an ornament', 'name': 'necklace'}, {'frequency': 'f', 'id': 729, 'synset': 'necktie.n.01', 'synonyms': ['necktie', 'tie_(necktie)'], 'def': 'neckwear consisting of a long narrow piece of material worn under a collar and tied in knot at the front', 'name': 'necktie'}, {'frequency': 'r', 'id': 730, 'synset': 'needle.n.03', 'synonyms': ['needle'], 'def': 'a sharp pointed implement (usually metal)', 'name': 'needle'}, {'frequency': 'c', 'id': 731, 'synset': 'nest.n.01', 'synonyms': ['nest'], 'def': 'a structure in which animals lay eggs or give birth to their young', 'name': 'nest'}, {'frequency': 'r', 'id': 732, 'synset': 'newsstand.n.01', 'synonyms': ['newsstand'], 'def': 'a stall where newspapers and other periodicals are sold', 'name': 'newsstand'}, {'frequency': 'c', 'id': 733, 'synset': 'nightwear.n.01', 'synonyms': ['nightshirt', 'nightwear', 'sleepwear', 'nightclothes'], 'def': 'garments designed to be worn in bed', 'name': 'nightshirt'}, {'frequency': 'r', 'id': 734, 'synset': 'nosebag.n.01', 'synonyms': ['nosebag_(for_animals)', 'feedbag'], 'def': 'a canvas bag that is used to feed an animal (such as a horse); covers the muzzle and fastens at the top of the head', 'name': 'nosebag_(for_animals)'}, {'frequency': 'r', 'id': 735, 'synset': 'noseband.n.01', 'synonyms': ['noseband_(for_animals)', 'nosepiece_(for_animals)'], 'def': "a strap that is the part of a bridle that goes over the animal's nose", 'name': 'noseband_(for_animals)'}, {'frequency': 'f', 'id': 736, 'synset': 'notebook.n.01', 'synonyms': ['notebook'], 'def': 'a book with blank pages for recording notes or memoranda', 'name': 'notebook'}, {'frequency': 'c', 'id': 737, 'synset': 'notepad.n.01', 'synonyms': ['notepad'], 'def': 'a pad of paper for keeping notes', 'name': 'notepad'}, {'frequency': 'c', 'id': 738, 'synset': 'nut.n.03', 'synonyms': ['nut'], 'def': 'a small metal block (usually square or hexagonal) with internal screw thread to be fitted onto a bolt', 'name': 'nut'}, {'frequency': 'r', 'id': 739, 'synset': 'nutcracker.n.01', 'synonyms': ['nutcracker'], 'def': 'a hand tool used to crack nuts open', 'name': 'nutcracker'}, {'frequency': 'c', 'id': 740, 'synset': 'oar.n.01', 'synonyms': ['oar'], 'def': 'an implement used to propel or steer a boat', 'name': 'oar'}, {'frequency': 'r', 'id': 741, 'synset': 'octopus.n.01', 'synonyms': ['octopus_(food)'], 'def': 'tentacles of octopus prepared as food', 'name': 'octopus_(food)'}, {'frequency': 'r', 'id': 742, 'synset': 'octopus.n.02', 'synonyms': ['octopus_(animal)'], 'def': 'bottom-living cephalopod having a soft oval body with eight long tentacles', 'name': 'octopus_(animal)'}, {'frequency': 'c', 'id': 743, 'synset': 'oil_lamp.n.01', 'synonyms': ['oil_lamp', 'kerosene_lamp', 'kerosine_lamp'], 'def': 'a lamp that burns oil (as kerosine) for light', 'name': 'oil_lamp'}, {'frequency': 'c', 'id': 744, 'synset': 'olive_oil.n.01', 'synonyms': ['olive_oil'], 'def': 'oil from olives', 'name': 'olive_oil'}, 
{'frequency': 'r', 'id': 745, 'synset': 'omelet.n.01', 'synonyms': ['omelet', 'omelette'], 'def': 'beaten eggs cooked until just set; may be folded around e.g. ham or cheese or jelly', 'name': 'omelet'}, {'frequency': 'f', 'id': 746, 'synset': 'onion.n.01', 'synonyms': ['onion'], 'def': 'the bulb of an onion plant', 'name': 'onion'}, {'frequency': 'f', 'id': 747, 'synset': 'orange.n.01', 'synonyms': ['orange_(fruit)'], 'def': 'orange (FRUIT of an orange tree)', 'name': 'orange_(fruit)'}, {'frequency': 'c', 'id': 748, 'synset': 'orange_juice.n.01', 'synonyms': ['orange_juice'], 'def': 'bottled or freshly squeezed juice of oranges', 'name': 'orange_juice'}, {'frequency': 'r', 'id': 749, 'synset': 'oregano.n.01', 'synonyms': ['oregano', 'marjoram'], 'def': 'aromatic Eurasian perennial herb used in cooking and baking', 'name': 'oregano'}, {'frequency': 'c', 'id': 750, 'synset': 'ostrich.n.02', 'synonyms': ['ostrich'], 'def': 'fast-running African flightless bird with two-toed feet; largest living bird', 'name': 'ostrich'}, {'frequency': 'c', 'id': 751, 'synset': 'ottoman.n.03', 'synonyms': ['ottoman', 'pouf', 'pouffe', 'hassock'], 'def': 'thick cushion used as a seat', 'name': 'ottoman'}, {'frequency': 'c', 'id': 752, 'synset': 'overall.n.01', 'synonyms': ['overalls_(clothing)'], 'def': 'work clothing consisting of denim trousers usually with a bib and shoulder straps', 'name': 'overalls_(clothing)'}, {'frequency': 'c', 'id': 753, 'synset': 'owl.n.01', 'synonyms': ['owl'], 'def': 'nocturnal bird of prey with hawk-like beak and claws and large head with front-facing eyes', 'name': 'owl'}, {'frequency': 'c', 'id': 754, 'synset': 'packet.n.03', 'synonyms': ['packet'], 'def': 'a small package or bundle', 'name': 'packet'}, {'frequency': 'r', 'id': 755, 'synset': 'pad.n.03', 'synonyms': ['inkpad', 'inking_pad', 'stamp_pad'], 'def': 'absorbent material saturated with ink used to transfer ink evenly to a rubber stamp', 'name': 'inkpad'}, {'frequency': 'c', 'id': 756, 'synset': 'pad.n.04', 'synonyms': ['pad'], 'def': 'a flat mass of soft material used for protection, stuffing, or comfort', 'name': 'pad'}, {'frequency': 'c', 'id': 757, 'synset': 'paddle.n.04', 'synonyms': ['paddle', 'boat_paddle'], 'def': 'a short light oar used without an oarlock to propel a canoe or small boat', 'name': 'paddle'}, {'frequency': 'c', 'id': 758, 'synset': 'padlock.n.01', 'synonyms': ['padlock'], 'def': 'a detachable, portable lock', 'name': 'padlock'}, {'frequency': 'r', 'id': 759, 'synset': 'paintbox.n.01', 'synonyms': ['paintbox'], 'def': "a box containing a collection of cubes or tubes of artists' paint", 'name': 'paintbox'}, {'frequency': 'c', 'id': 760, 'synset': 'paintbrush.n.01', 'synonyms': ['paintbrush'], 'def': 'a brush used as an applicator to apply paint', 'name': 'paintbrush'}, {'frequency': 'f', 'id': 761, 'synset': 'painting.n.01', 'synonyms': ['painting'], 'def': 'graphic art consisting of an artistic composition made by applying paints to a surface', 'name': 'painting'}, {'frequency': 'c', 'id': 762, 'synset': 'pajama.n.02', 'synonyms': ['pajamas', 'pyjamas'], 'def': 'loose-fitting nightclothes worn for sleeping or lounging', 'name': 'pajamas'}, {'frequency': 'c', 'id': 763, 'synset': 'palette.n.02', 'synonyms': ['palette', 'pallet'], 'def': 'board that provides a flat surface on which artists mix paints and the range of colors used', 'name': 'palette'}, {'frequency': 'f', 'id': 764, 'synset': 'pan.n.01', 'synonyms': ['pan_(for_cooking)', 'cooking_pan'], 'def': 'cooking utensil consisting of a wide 
metal vessel', 'name': 'pan_(for_cooking)'}, {'frequency': 'r', 'id': 765, 'synset': 'pan.n.03', 'synonyms': ['pan_(metal_container)'], 'def': 'shallow container made of metal', 'name': 'pan_(metal_container)'}, {'frequency': 'c', 'id': 766, 'synset': 'pancake.n.01', 'synonyms': ['pancake'], 'def': 'a flat cake of thin batter fried on both sides on a griddle', 'name': 'pancake'}, {'frequency': 'r', 'id': 767, 'synset': 'pantyhose.n.01', 'synonyms': ['pantyhose'], 'def': "a woman's tights consisting of underpants and stockings", 'name': 'pantyhose'}, {'frequency': 'r', 'id': 768, 'synset': 'papaya.n.02', 'synonyms': ['papaya'], 'def': 'large oval melon-like tropical fruit with yellowish flesh', 'name': 'papaya'}, {'frequency': 'r', 'id': 769, 'synset': 'paper_clip.n.01', 'synonyms': ['paperclip'], 'def': 'a wire or plastic clip for holding sheets of paper together', 'name': 'paperclip'}, {'frequency': 'f', 'id': 770, 'synset': 'paper_plate.n.01', 'synonyms': ['paper_plate'], 'def': 'a disposable plate made of cardboard', 'name': 'paper_plate'}, {'frequency': 'f', 'id': 771, 'synset': 'paper_towel.n.01', 'synonyms': ['paper_towel'], 'def': 'a disposable towel made of absorbent paper', 'name': 'paper_towel'}, {'frequency': 'r', 'id': 772, 'synset': 'paperback_book.n.01', 'synonyms': ['paperback_book', 'paper-back_book', 'softback_book', 'soft-cover_book'], 'def': 'a book with paper covers', 'name': 'paperback_book'}, {'frequency': 'r', 'id': 773, 'synset': 'paperweight.n.01', 'synonyms': ['paperweight'], 'def': 'a weight used to hold down a stack of papers', 'name': 'paperweight'}, {'frequency': 'c', 'id': 774, 'synset': 'parachute.n.01', 'synonyms': ['parachute'], 'def': 'rescue equipment consisting of a device that fills with air and retards your fall', 'name': 'parachute'}, {'frequency': 'r', 'id': 775, 'synset': 'parakeet.n.01', 'synonyms': ['parakeet', 'parrakeet', 'parroket', 'paraquet', 'paroquet', 'parroquet'], 'def': 'any of numerous small slender long-tailed parrots', 'name': 'parakeet'}, {'frequency': 'c', 'id': 776, 'synset': 'parasail.n.01', 'synonyms': ['parasail_(sports)'], 'def': 'parachute that will lift a person up into the air when it is towed by a motorboat or a car', 'name': 'parasail_(sports)'}, {'frequency': 'r', 'id': 777, 'synset': 'parchment.n.01', 'synonyms': ['parchment'], 'def': 'a superior paper resembling sheepskin', 'name': 'parchment'}, {'frequency': 'r', 'id': 778, 'synset': 'parka.n.01', 'synonyms': ['parka', 'anorak'], 'def': "a kind of heavy jacket (`windcheater' is a British term)", 'name': 'parka'}, {'frequency': 'f', 'id': 779, 'synset': 'parking_meter.n.01', 'synonyms': ['parking_meter'], 'def': 'a coin-operated timer located next to a parking space', 'name': 'parking_meter'}, {'frequency': 'c', 'id': 780, 'synset': 'parrot.n.01', 'synonyms': ['parrot'], 'def': 'usually brightly colored tropical birds with short hooked beaks and the ability to mimic sounds', 'name': 'parrot'}, {'frequency': 'c', 'id': 781, 'synset': 'passenger_car.n.01', 'synonyms': ['passenger_car_(part_of_a_train)', 'coach_(part_of_a_train)'], 'def': 'a railcar where passengers ride', 'name': 'passenger_car_(part_of_a_train)'}, {'frequency': 'r', 'id': 782, 'synset': 'passenger_ship.n.01', 'synonyms': ['passenger_ship'], 'def': 'a ship built to carry passengers', 'name': 'passenger_ship'}, {'frequency': 'r', 'id': 783, 'synset': 'passport.n.02', 'synonyms': ['passport'], 'def': 'a document issued by a country to a citizen allowing that person to travel abroad and re-enter the home 
country', 'name': 'passport'}, {'frequency': 'f', 'id': 784, 'synset': 'pastry.n.02', 'synonyms': ['pastry'], 'def': 'any of various baked foods made of dough or batter', 'name': 'pastry'}, {'frequency': 'r', 'id': 785, 'synset': 'patty.n.01', 'synonyms': ['patty_(food)'], 'def': 'small flat mass of chopped food', 'name': 'patty_(food)'}, {'frequency': 'c', 'id': 786, 'synset': 'pea.n.01', 'synonyms': ['pea_(food)'], 'def': 'seed of a pea plant used for food', 'name': 'pea_(food)'}, {'frequency': 'c', 'id': 787, 'synset': 'peach.n.03', 'synonyms': ['peach'], 'def': 'downy juicy fruit with sweet yellowish or whitish flesh', 'name': 'peach'}, {'frequency': 'c', 'id': 788, 'synset': 'peanut_butter.n.01', 'synonyms': ['peanut_butter'], 'def': 'a spread made from ground peanuts', 'name': 'peanut_butter'}, {'frequency': 'c', 'id': 789, 'synset': 'pear.n.01', 'synonyms': ['pear'], 'def': 'sweet juicy gritty-textured fruit available in many varieties', 'name': 'pear'}, {'frequency': 'r', 'id': 790, 'synset': 'peeler.n.03', 'synonyms': ['peeler_(tool_for_fruit_and_vegetables)'], 'def': 'a device for peeling vegetables or fruits', 'name': 'peeler_(tool_for_fruit_and_vegetables)'}, {'frequency': 'r', 'id': 791, 'synset': 'pegboard.n.01', 'synonyms': ['pegboard'], 'def': 'a board perforated with regularly spaced holes into which pegs can be fitted', 'name': 'pegboard'}, {'frequency': 'c', 'id': 792, 'synset': 'pelican.n.01', 'synonyms': ['pelican'], 'def': 'large long-winged warm-water seabird having a large bill with a distensible pouch for fish', 'name': 'pelican'}, {'frequency': 'f', 'id': 793, 'synset': 'pen.n.01', 'synonyms': ['pen'], 'def': 'a writing implement with a point from which ink flows', 'name': 'pen'}, {'frequency': 'c', 'id': 794, 'synset': 'pencil.n.01', 'synonyms': ['pencil'], 'def': 'a thin cylindrical pointed writing implement made of wood and graphite', 'name': 'pencil'}, {'frequency': 'r', 'id': 795, 'synset': 'pencil_box.n.01', 'synonyms': ['pencil_box', 'pencil_case'], 'def': 'a box for holding pencils', 'name': 'pencil_box'}, {'frequency': 'r', 'id': 796, 'synset': 'pencil_sharpener.n.01', 'synonyms': ['pencil_sharpener'], 'def': 'a rotary implement for sharpening the point on pencils', 'name': 'pencil_sharpener'}, {'frequency': 'r', 'id': 797, 'synset': 'pendulum.n.01', 'synonyms': ['pendulum'], 'def': 'an apparatus consisting of an object mounted so that it swings freely under the influence of gravity', 'name': 'pendulum'}, {'frequency': 'c', 'id': 798, 'synset': 'penguin.n.01', 'synonyms': ['penguin'], 'def': 'short-legged flightless birds of cold southern regions having webbed feet and wings modified as flippers', 'name': 'penguin'}, {'frequency': 'r', 'id': 799, 'synset': 'pennant.n.02', 'synonyms': ['pennant'], 'def': 'a flag longer than it is wide (and often tapering)', 'name': 'pennant'}, {'frequency': 'r', 'id': 800, 'synset': 'penny.n.02', 'synonyms': ['penny_(coin)'], 'def': 'a coin worth one-hundredth of the value of the basic unit', 'name': 'penny_(coin)'}, {'frequency': 'c', 'id': 801, 'synset': 'pepper.n.03', 'synonyms': ['pepper', 'peppercorn'], 'def': 'pungent seasoning from the berry of the common pepper plant; whole or ground', 'name': 'pepper'}, {'frequency': 'c', 'id': 802, 'synset': 'pepper_mill.n.01', 'synonyms': ['pepper_mill', 'pepper_grinder'], 'def': 'a mill for grinding pepper', 'name': 'pepper_mill'}, {'frequency': 'c', 'id': 803, 'synset': 'perfume.n.02', 'synonyms': ['perfume'], 'def': 'a toiletry that emits and diffuses a fragrant odor', 
'name': 'perfume'}, {'frequency': 'r', 'id': 804, 'synset': 'persimmon.n.02', 'synonyms': ['persimmon'], 'def': 'orange fruit resembling a plum; edible when fully ripe', 'name': 'persimmon'}, {'frequency': 'f', 'id': 805, 'synset': 'person.n.01', 'synonyms': ['baby', 'child', 'boy', 'girl', 'man', 'woman', 'person', 'human'], 'def': 'a human being', 'name': 'baby'}, {'frequency': 'r', 'id': 806, 'synset': 'pet.n.01', 'synonyms': ['pet'], 'def': 'a domesticated animal kept for companionship or amusement', 'name': 'pet'}, {'frequency': 'r', 'id': 807, 'synset': 'petfood.n.01', 'synonyms': ['petfood', 'pet-food'], 'def': 'food prepared for animal pets', 'name': 'petfood'}, {'frequency': 'r', 'id': 808, 'synset': 'pew.n.01', 'synonyms': ['pew_(church_bench)', 'church_bench'], 'def': 'long bench with backs; used in church by the congregation', 'name': 'pew_(church_bench)'}, {'frequency': 'r', 'id': 809, 'synset': 'phonebook.n.01', 'synonyms': ['phonebook', 'telephone_book', 'telephone_directory'], 'def': 'a directory containing an alphabetical list of telephone subscribers and their telephone numbers', 'name': 'phonebook'}, {'frequency': 'c', 'id': 810, 'synset': 'phonograph_record.n.01', 'synonyms': ['phonograph_record', 'phonograph_recording', 'record_(phonograph_recording)'], 'def': 'sound recording consisting of a typically black disk with a continuous groove', 'name': 'phonograph_record'}, {'frequency': 'c', 'id': 811, 'synset': 'piano.n.01', 'synonyms': ['piano'], 'def': 'a keyboard instrument that is played by depressing keys that cause hammers to strike tuned strings and produce sounds', 'name': 'piano'}, {'frequency': 'f', 'id': 812, 'synset': 'pickle.n.01', 'synonyms': ['pickle'], 'def': 'vegetables (especially cucumbers) preserved in brine or vinegar', 'name': 'pickle'}, {'frequency': 'f', 'id': 813, 'synset': 'pickup.n.01', 'synonyms': ['pickup_truck'], 'def': 'a light truck with an open body and low sides and a tailboard', 'name': 'pickup_truck'}, {'frequency': 'c', 'id': 814, 'synset': 'pie.n.01', 'synonyms': ['pie'], 'def': 'dish baked in pastry-lined pan often with a pastry top', 'name': 'pie'}, {'frequency': 'c', 'id': 815, 'synset': 'pigeon.n.01', 'synonyms': ['pigeon'], 'def': 'wild and domesticated birds having a heavy body and short legs', 'name': 'pigeon'}, {'frequency': 'r', 'id': 816, 'synset': 'piggy_bank.n.01', 'synonyms': ['piggy_bank', 'penny_bank'], 'def': "a child's coin bank (often shaped like a pig)", 'name': 'piggy_bank'}, {'frequency': 'f', 'id': 817, 'synset': 'pillow.n.01', 'synonyms': ['pillow'], 'def': 'a cushion to support the head of a sleeping person', 'name': 'pillow'}, {'frequency': 'r', 'id': 818, 'synset': 'pin.n.09', 'synonyms': ['pin_(non_jewelry)'], 'def': 'a small slender (often pointed) piece of wood or metal used to support or fasten or attach things', 'name': 'pin_(non_jewelry)'}, {'frequency': 'f', 'id': 819, 'synset': 'pineapple.n.02', 'synonyms': ['pineapple'], 'def': 'large sweet fleshy tropical fruit with a tuft of stiff leaves', 'name': 'pineapple'}, {'frequency': 'c', 'id': 820, 'synset': 'pinecone.n.01', 'synonyms': ['pinecone'], 'def': 'the seed-producing cone of a pine tree', 'name': 'pinecone'}, {'frequency': 'r', 'id': 821, 'synset': 'ping-pong_ball.n.01', 'synonyms': ['ping-pong_ball'], 'def': 'light hollow ball used in playing table tennis', 'name': 'ping-pong_ball'}, {'frequency': 'r', 'id': 822, 'synset': 'pinwheel.n.03', 'synonyms': ['pinwheel'], 'def': 'a toy consisting of vanes of colored paper or plastic that is pinned to a 
stick and spins when it is pointed into the wind', 'name': 'pinwheel'}, {'frequency': 'r', 'id': 823, 'synset': 'pipe.n.01', 'synonyms': ['tobacco_pipe'], 'def': 'a tube with a small bowl at one end; used for smoking tobacco', 'name': 'tobacco_pipe'}, {'frequency': 'f', 'id': 824, 'synset': 'pipe.n.02', 'synonyms': ['pipe', 'piping'], 'def': 'a long tube made of metal or plastic that is used to carry water or oil or gas etc.', 'name': 'pipe'}, {'frequency': 'r', 'id': 825, 'synset': 'pistol.n.01', 'synonyms': ['pistol', 'handgun'], 'def': 'a firearm that is held and fired with one hand', 'name': 'pistol'}, {'frequency': 'r', 'id': 826, 'synset': 'pita.n.01', 'synonyms': ['pita_(bread)', 'pocket_bread'], 'def': 'usually small round bread that can open into a pocket for filling', 'name': 'pita_(bread)'}, {'frequency': 'f', 'id': 827, 'synset': 'pitcher.n.02', 'synonyms': ['pitcher_(vessel_for_liquid)', 'ewer'], 'def': 'an open vessel with a handle and a spout for pouring', 'name': 'pitcher_(vessel_for_liquid)'}, {'frequency': 'r', 'id': 828, 'synset': 'pitchfork.n.01', 'synonyms': ['pitchfork'], 'def': 'a long-handled hand tool with sharp widely spaced prongs for lifting and pitching hay', 'name': 'pitchfork'}, {'frequency': 'f', 'id': 829, 'synset': 'pizza.n.01', 'synonyms': ['pizza'], 'def': 'Italian open pie made of thin bread dough spread with a spiced mixture of e.g. tomato sauce and cheese', 'name': 'pizza'}, {'frequency': 'f', 'id': 830, 'synset': 'place_mat.n.01', 'synonyms': ['place_mat'], 'def': 'a mat placed on a table for an individual place setting', 'name': 'place_mat'}, {'frequency': 'f', 'id': 831, 'synset': 'plate.n.04', 'synonyms': ['plate'], 'def': 'dish on which food is served or from which food is eaten', 'name': 'plate'}, {'frequency': 'c', 'id': 832, 'synset': 'platter.n.01', 'synonyms': ['platter'], 'def': 'a large shallow dish used for serving food', 'name': 'platter'}, {'frequency': 'r', 'id': 833, 'synset': 'playing_card.n.01', 'synonyms': ['playing_card'], 'def': 'one of a pack of cards that are used to play card games', 'name': 'playing_card'}, {'frequency': 'r', 'id': 834, 'synset': 'playpen.n.01', 'synonyms': ['playpen'], 'def': 'a portable enclosure in which babies may be left to play', 'name': 'playpen'}, {'frequency': 'c', 'id': 835, 'synset': 'pliers.n.01', 'synonyms': ['pliers', 'plyers'], 'def': 'a gripping hand tool with two hinged arms and (usually) serrated jaws', 'name': 'pliers'}, {'frequency': 'r', 'id': 836, 'synset': 'plow.n.01', 'synonyms': ['plow_(farm_equipment)', 'plough_(farm_equipment)'], 'def': 'a farm tool having one or more heavy blades to break the soil and cut a furrow prior to sowing', 'name': 'plow_(farm_equipment)'}, {'frequency': 'r', 'id': 837, 'synset': 'pocket_watch.n.01', 'synonyms': ['pocket_watch'], 'def': 'a watch that is carried in a small watch pocket', 'name': 'pocket_watch'}, {'frequency': 'c', 'id': 838, 'synset': 'pocketknife.n.01', 'synonyms': ['pocketknife'], 'def': 'a knife with a blade that folds into the handle; suitable for carrying in the pocket', 'name': 'pocketknife'}, {'frequency': 'c', 'id': 839, 'synset': 'poker.n.01', 'synonyms': ['poker_(fire_stirring_tool)', 'stove_poker', 'fire_hook'], 'def': 'fire iron consisting of a metal rod with a handle; used to stir a fire', 'name': 'poker_(fire_stirring_tool)'}, {'frequency': 'f', 'id': 840, 'synset': 'pole.n.01', 'synonyms': ['pole', 'post'], 'def': 'a long (usually round) rod of wood or metal or plastic', 'name': 'pole'}, {'frequency': 'r', 'id': 841, 'synset': 
'police_van.n.01', 'synonyms': ['police_van', 'police_wagon', 'paddy_wagon', 'patrol_wagon'], 'def': 'van used by police to transport prisoners', 'name': 'police_van'}, {'frequency': 'f', 'id': 842, 'synset': 'polo_shirt.n.01', 'synonyms': ['polo_shirt', 'sport_shirt'], 'def': 'a shirt with short sleeves designed for comfort and casual wear', 'name': 'polo_shirt'}, {'frequency': 'r', 'id': 843, 'synset': 'poncho.n.01', 'synonyms': ['poncho'], 'def': 'a blanket-like cloak with a hole in the center for the head', 'name': 'poncho'}, {'frequency': 'c', 'id': 844, 'synset': 'pony.n.05', 'synonyms': ['pony'], 'def': 'any of various breeds of small gentle horses usually less than five feet high at the shoulder', 'name': 'pony'}, {'frequency': 'r', 'id': 845, 'synset': 'pool_table.n.01', 'synonyms': ['pool_table', 'billiard_table', 'snooker_table'], 'def': 'game equipment consisting of a heavy table on which pool is played', 'name': 'pool_table'}, {'frequency': 'f', 'id': 846, 'synset': 'pop.n.02', 'synonyms': ['pop_(soda)', 'soda_(pop)', 'tonic', 'soft_drink'], 'def': 'a sweet drink containing carbonated water and flavoring', 'name': 'pop_(soda)'}, {'frequency': 'r', 'id': 847, 'synset': 'portrait.n.02', 'synonyms': ['portrait', 'portrayal'], 'def': 'any likeness of a person, in any medium', 'name': 'portrait'}, {'frequency': 'c', 'id': 848, 'synset': 'postbox.n.01', 'synonyms': ['postbox_(public)', 'mailbox_(public)'], 'def': 'public box for deposit of mail', 'name': 'postbox_(public)'}, {'frequency': 'c', 'id': 849, 'synset': 'postcard.n.01', 'synonyms': ['postcard', 'postal_card', 'mailing-card'], 'def': 'a card for sending messages by post without an envelope', 'name': 'postcard'}, {'frequency': 'f', 'id': 850, 'synset': 'poster.n.01', 'synonyms': ['poster', 'placard'], 'def': 'a sign posted in a public place as an advertisement', 'name': 'poster'}, {'frequency': 'f', 'id': 851, 'synset': 'pot.n.01', 'synonyms': ['pot'], 'def': 'metal or earthenware cooking vessel that is usually round and deep; often has a handle and lid', 'name': 'pot'}, {'frequency': 'f', 'id': 852, 'synset': 'pot.n.04', 'synonyms': ['flowerpot'], 'def': 'a container in which plants are cultivated', 'name': 'flowerpot'}, {'frequency': 'f', 'id': 853, 'synset': 'potato.n.01', 'synonyms': ['potato'], 'def': 'an edible tuber native to South America', 'name': 'potato'}, {'frequency': 'c', 'id': 854, 'synset': 'potholder.n.01', 'synonyms': ['potholder'], 'def': 'an insulated pad for holding hot pots', 'name': 'potholder'}, {'frequency': 'c', 'id': 855, 'synset': 'pottery.n.01', 'synonyms': ['pottery', 'clayware'], 'def': 'ceramic ware made from clay and baked in a kiln', 'name': 'pottery'}, {'frequency': 'c', 'id': 856, 'synset': 'pouch.n.01', 'synonyms': ['pouch'], 'def': 'a small or medium size container for holding or carrying things', 'name': 'pouch'}, {'frequency': 'r', 'id': 857, 'synset': 'power_shovel.n.01', 'synonyms': ['power_shovel', 'excavator', 'digger'], 'def': 'a machine for excavating', 'name': 'power_shovel'}, {'frequency': 'c', 'id': 858, 'synset': 'prawn.n.01', 'synonyms': ['prawn', 'shrimp'], 'def': 'any of various edible decapod crustaceans', 'name': 'prawn'}, {'frequency': 'f', 'id': 859, 'synset': 'printer.n.03', 'synonyms': ['printer', 'printing_machine'], 'def': 'a machine that prints', 'name': 'printer'}, {'frequency': 'c', 'id': 860, 'synset': 'projectile.n.01', 'synonyms': ['projectile_(weapon)', 'missile'], 'def': 'a weapon that is forcibly thrown or projected at a target', 'name': 
'projectile_(weapon)'}, {'frequency': 'c', 'id': 861, 'synset': 'projector.n.02', 'synonyms': ['projector'], 'def': 'an optical instrument that projects an enlarged image onto a screen', 'name': 'projector'}, {'frequency': 'f', 'id': 862, 'synset': 'propeller.n.01', 'synonyms': ['propeller', 'propellor'], 'def': 'a mechanical device that rotates to push against air or water', 'name': 'propeller'}, {'frequency': 'r', 'id': 863, 'synset': 'prune.n.01', 'synonyms': ['prune'], 'def': 'dried plum', 'name': 'prune'}, {'frequency': 'r', 'id': 864, 'synset': 'pudding.n.01', 'synonyms': ['pudding'], 'def': 'any of various soft thick unsweetened baked dishes', 'name': 'pudding'}, {'frequency': 'r', 'id': 865, 'synset': 'puffer.n.02', 'synonyms': ['puffer_(fish)', 'pufferfish', 'blowfish', 'globefish'], 'def': 'fishes whose elongated spiny body can inflate itself with water or air to form a globe', 'name': 'puffer_(fish)'}, {'frequency': 'r', 'id': 866, 'synset': 'puffin.n.01', 'synonyms': ['puffin'], 'def': 'seabirds having short necks and brightly colored compressed bills', 'name': 'puffin'}, {'frequency': 'r', 'id': 867, 'synset': 'pug.n.01', 'synonyms': ['pug-dog'], 'def': 'small compact smooth-coated breed of Asiatic origin having a tightly curled tail and broad flat wrinkled muzzle', 'name': 'pug-dog'}, {'frequency': 'c', 'id': 868, 'synset': 'pumpkin.n.02', 'synonyms': ['pumpkin'], 'def': 'usually large pulpy deep-yellow round fruit of the squash family maturing in late summer or early autumn', 'name': 'pumpkin'}, {'frequency': 'r', 'id': 869, 'synset': 'punch.n.03', 'synonyms': ['puncher'], 'def': 'a tool for making holes or indentations', 'name': 'puncher'}, {'frequency': 'r', 'id': 870, 'synset': 'puppet.n.01', 'synonyms': ['puppet', 'marionette'], 'def': 'a small figure of a person operated from above with strings by a puppeteer', 'name': 'puppet'}, {'frequency': 'r', 'id': 871, 'synset': 'puppy.n.01', 'synonyms': ['puppy'], 'def': 'a young dog', 'name': 'puppy'}, {'frequency': 'r', 'id': 872, 'synset': 'quesadilla.n.01', 'synonyms': ['quesadilla'], 'def': 'a tortilla that is filled with cheese and heated', 'name': 'quesadilla'}, {'frequency': 'r', 'id': 873, 'synset': 'quiche.n.02', 'synonyms': ['quiche'], 'def': 'a tart filled with rich unsweetened custard; often contains other ingredients (as cheese or ham or seafood or vegetables)', 'name': 'quiche'}, {'frequency': 'f', 'id': 874, 'synset': 'quilt.n.01', 'synonyms': ['quilt', 'comforter'], 'def': 'bedding made of two layers of cloth filled with stuffing and stitched together', 'name': 'quilt'}, {'frequency': 'c', 'id': 875, 'synset': 'rabbit.n.01', 'synonyms': ['rabbit'], 'def': 'any of various burrowing animals of the family Leporidae having long ears and short tails', 'name': 'rabbit'}, {'frequency': 'r', 'id': 876, 'synset': 'racer.n.02', 'synonyms': ['race_car', 'racing_car'], 'def': 'a fast car that competes in races', 'name': 'race_car'}, {'frequency': 'c', 'id': 877, 'synset': 'racket.n.04', 'synonyms': ['racket', 'racquet'], 'def': 'a sports implement used to strike a ball in various games', 'name': 'racket'}, {'frequency': 'r', 'id': 878, 'synset': 'radar.n.01', 'synonyms': ['radar'], 'def': 'measuring instrument in which the echo of a pulse of microwave radiation is used to detect and locate distant objects', 'name': 'radar'}, {'frequency': 'c', 'id': 879, 'synset': 'radiator.n.03', 'synonyms': ['radiator'], 'def': 'a mechanism consisting of a metal honeycomb through which hot fluids circulate', 'name': 'radiator'}, 
{'frequency': 'c', 'id': 880, 'synset': 'radio_receiver.n.01', 'synonyms': ['radio_receiver', 'radio_set', 'radio', 'tuner_(radio)'], 'def': 'an electronic receiver that detects and demodulates and amplifies transmitted radio signals', 'name': 'radio_receiver'}, {'frequency': 'c', 'id': 881, 'synset': 'radish.n.03', 'synonyms': ['radish', 'daikon'], 'def': 'pungent edible root of any of various cultivated radish plants', 'name': 'radish'}, {'frequency': 'c', 'id': 882, 'synset': 'raft.n.01', 'synonyms': ['raft'], 'def': 'a flat float (usually made of logs or planks) that can be used for transport or as a platform for swimmers', 'name': 'raft'}, {'frequency': 'r', 'id': 883, 'synset': 'rag_doll.n.01', 'synonyms': ['rag_doll'], 'def': 'a cloth doll that is stuffed and (usually) painted', 'name': 'rag_doll'}, {'frequency': 'c', 'id': 884, 'synset': 'raincoat.n.01', 'synonyms': ['raincoat', 'waterproof_jacket'], 'def': 'a water-resistant coat', 'name': 'raincoat'}, {'frequency': 'c', 'id': 885, 'synset': 'ram.n.05', 'synonyms': ['ram_(animal)'], 'def': 'uncastrated adult male sheep', 'name': 'ram_(animal)'}, {'frequency': 'c', 'id': 886, 'synset': 'raspberry.n.02', 'synonyms': ['raspberry'], 'def': 'red or black edible aggregate berries usually smaller than the related blackberries', 'name': 'raspberry'}, {'frequency': 'r', 'id': 887, 'synset': 'rat.n.01', 'synonyms': ['rat'], 'def': 'any of various long-tailed rodents similar to but larger than a mouse', 'name': 'rat'}, {'frequency': 'c', 'id': 888, 'synset': 'razorblade.n.01', 'synonyms': ['razorblade'], 'def': 'a blade that has very sharp edge', 'name': 'razorblade'}, {'frequency': 'c', 'id': 889, 'synset': 'reamer.n.01', 'synonyms': ['reamer_(juicer)', 'juicer', 'juice_reamer'], 'def': 'a squeezer with a conical ridged center that is used for squeezing juice from citrus fruit', 'name': 'reamer_(juicer)'}, {'frequency': 'f', 'id': 890, 'synset': 'rearview_mirror.n.01', 'synonyms': ['rearview_mirror'], 'def': 'car mirror that reflects the view out of the rear window', 'name': 'rearview_mirror'}, {'frequency': 'c', 'id': 891, 'synset': 'receipt.n.02', 'synonyms': ['receipt'], 'def': 'an acknowledgment (usually tangible) that payment has been made', 'name': 'receipt'}, {'frequency': 'c', 'id': 892, 'synset': 'recliner.n.01', 'synonyms': ['recliner', 'reclining_chair', 'lounger_(chair)'], 'def': 'an armchair whose back can be lowered and foot can be raised to allow the sitter to recline in it', 'name': 'recliner'}, {'frequency': 'r', 'id': 893, 'synset': 'record_player.n.01', 'synonyms': ['record_player', 'phonograph_(record_player)', 'turntable'], 'def': 'machine in which rotating records cause a stylus to vibrate and the vibrations are amplified acoustically or electronically', 'name': 'record_player'}, {'frequency': 'r', 'id': 894, 'synset': 'red_cabbage.n.02', 'synonyms': ['red_cabbage'], 'def': 'compact head of purplish-red leaves', 'name': 'red_cabbage'}, {'frequency': 'f', 'id': 895, 'synset': 'reflector.n.01', 'synonyms': ['reflector'], 'def': 'device that reflects light, radiation, etc.', 'name': 'reflector'}, {'frequency': 'f', 'id': 896, 'synset': 'remote_control.n.01', 'synonyms': ['remote_control'], 'def': 'a device that can be used to control a machine or apparatus from a distance', 'name': 'remote_control'}, {'frequency': 'c', 'id': 897, 'synset': 'rhinoceros.n.01', 'synonyms': ['rhinoceros'], 'def': 'massive powerful herbivorous odd-toed ungulate of southeast Asia and Africa having very thick skin and one or two horns on the 
snout', 'name': 'rhinoceros'}, {'frequency': 'r', 'id': 898, 'synset': 'rib.n.03', 'synonyms': ['rib_(food)'], 'def': 'cut of meat including one or more ribs', 'name': 'rib_(food)'}, {'frequency': 'r', 'id': 899, 'synset': 'rifle.n.01', 'synonyms': ['rifle'], 'def': 'a shoulder firearm with a long barrel', 'name': 'rifle'}, {'frequency': 'f', 'id': 900, 'synset': 'ring.n.08', 'synonyms': ['ring'], 'def': 'jewelry consisting of a circlet of precious metal (often set with jewels) worn on the finger', 'name': 'ring'}, {'frequency': 'r', 'id': 901, 'synset': 'river_boat.n.01', 'synonyms': ['river_boat'], 'def': 'a boat used on rivers or to ply a river', 'name': 'river_boat'}, {'frequency': 'r', 'id': 902, 'synset': 'road_map.n.02', 'synonyms': ['road_map'], 'def': '(NOT A ROAD) a MAP showing roads (for automobile travel)', 'name': 'road_map'}, {'frequency': 'c', 'id': 903, 'synset': 'robe.n.01', 'synonyms': ['robe'], 'def': 'any loose flowing garment', 'name': 'robe'}, {'frequency': 'c', 'id': 904, 'synset': 'rocking_chair.n.01', 'synonyms': ['rocking_chair'], 'def': 'a chair mounted on rockers', 'name': 'rocking_chair'}, {'frequency': 'r', 'id': 905, 'synset': 'roller_skate.n.01', 'synonyms': ['roller_skate'], 'def': 'a shoe with pairs of rollers (small hard wheels) fixed to the sole', 'name': 'roller_skate'}, {'frequency': 'r', 'id': 906, 'synset': 'rollerblade.n.01', 'synonyms': ['Rollerblade'], 'def': 'an in-line variant of a roller skate', 'name': 'Rollerblade'}, {'frequency': 'c', 'id': 907, 'synset': 'rolling_pin.n.01', 'synonyms': ['rolling_pin'], 'def': 'utensil consisting of a cylinder (usually of wood) with a handle at each end; used to roll out dough', 'name': 'rolling_pin'}, {'frequency': 'r', 'id': 908, 'synset': 'root_beer.n.01', 'synonyms': ['root_beer'], 'def': 'carbonated drink containing extracts of roots and herbs', 'name': 'root_beer'}, {'frequency': 'c', 'id': 909, 'synset': 'router.n.02', 'synonyms': ['router_(computer_equipment)'], 'def': 'a device that forwards data packets between computer networks', 'name': 'router_(computer_equipment)'}, {'frequency': 'f', 'id': 910, 'synset': 'rubber_band.n.01', 'synonyms': ['rubber_band', 'elastic_band'], 'def': 'a narrow band of elastic rubber used to hold things (such as papers) together', 'name': 'rubber_band'}, {'frequency': 'c', 'id': 911, 'synset': 'runner.n.08', 'synonyms': ['runner_(carpet)'], 'def': 'a long narrow carpet', 'name': 'runner_(carpet)'}, {'frequency': 'f', 'id': 912, 'synset': 'sack.n.01', 'synonyms': ['plastic_bag', 'paper_bag'], 'def': "a bag made of paper or plastic for holding customer's purchases", 'name': 'plastic_bag'}, {'frequency': 'f', 'id': 913, 'synset': 'saddle.n.01', 'synonyms': ['saddle_(on_an_animal)'], 'def': 'a seat for the rider of a horse or camel', 'name': 'saddle_(on_an_animal)'}, {'frequency': 'f', 'id': 914, 'synset': 'saddle_blanket.n.01', 'synonyms': ['saddle_blanket', 'saddlecloth', 'horse_blanket'], 'def': 'stable gear consisting of a blanket placed under the saddle', 'name': 'saddle_blanket'}, {'frequency': 'c', 'id': 915, 'synset': 'saddlebag.n.01', 'synonyms': ['saddlebag'], 'def': 'a large bag (or pair of bags) hung over a saddle', 'name': 'saddlebag'}, {'frequency': 'r', 'id': 916, 'synset': 'safety_pin.n.01', 'synonyms': ['safety_pin'], 'def': 'a pin in the form of a clasp; has a guard so the point of the pin will not stick the user', 'name': 'safety_pin'}, {'frequency': 'c', 'id': 917, 'synset': 'sail.n.01', 'synonyms': ['sail'], 'def': 'a large piece of fabric by means of 
which wind is used to propel a sailing vessel', 'name': 'sail'}, {'frequency': 'c', 'id': 918, 'synset': 'salad.n.01', 'synonyms': ['salad'], 'def': 'food mixtures either arranged on a plate or tossed and served with a moist dressing; usually consisting of or including greens', 'name': 'salad'}, {'frequency': 'r', 'id': 919, 'synset': 'salad_plate.n.01', 'synonyms': ['salad_plate', 'salad_bowl'], 'def': 'a plate or bowl for individual servings of salad', 'name': 'salad_plate'}, {'frequency': 'r', 'id': 920, 'synset': 'salami.n.01', 'synonyms': ['salami'], 'def': 'highly seasoned fatty sausage of pork and beef usually dried', 'name': 'salami'}, {'frequency': 'r', 'id': 921, 'synset': 'salmon.n.01', 'synonyms': ['salmon_(fish)'], 'def': 'any of various large food and game fishes of northern waters', 'name': 'salmon_(fish)'}, {'frequency': 'r', 'id': 922, 'synset': 'salmon.n.03', 'synonyms': ['salmon_(food)'], 'def': 'flesh of any of various marine or freshwater fish of the family Salmonidae', 'name': 'salmon_(food)'}, {'frequency': 'r', 'id': 923, 'synset': 'salsa.n.01', 'synonyms': ['salsa'], 'def': 'spicy sauce of tomatoes and onions and chili peppers to accompany Mexican foods', 'name': 'salsa'}, {'frequency': 'f', 'id': 924, 'synset': 'saltshaker.n.01', 'synonyms': ['saltshaker'], 'def': 'a shaker with a perforated top for sprinkling salt', 'name': 'saltshaker'}, {'frequency': 'f', 'id': 925, 'synset': 'sandal.n.01', 'synonyms': ['sandal_(type_of_shoe)'], 'def': 'a shoe consisting of a sole fastened by straps to the foot', 'name': 'sandal_(type_of_shoe)'}, {'frequency': 'f', 'id': 926, 'synset': 'sandwich.n.01', 'synonyms': ['sandwich'], 'def': 'two (or more) slices of bread with a filling between them', 'name': 'sandwich'}, {'frequency': 'r', 'id': 927, 'synset': 'satchel.n.01', 'synonyms': ['satchel'], 'def': 'luggage consisting of a small case with a flat bottom and (usually) a shoulder strap', 'name': 'satchel'}, {'frequency': 'r', 'id': 928, 'synset': 'saucepan.n.01', 'synonyms': ['saucepan'], 'def': 'a deep pan with a handle; used for stewing or boiling', 'name': 'saucepan'}, {'frequency': 'f', 'id': 929, 'synset': 'saucer.n.02', 'synonyms': ['saucer'], 'def': 'a small shallow dish for holding a cup at the table', 'name': 'saucer'}, {'frequency': 'f', 'id': 930, 'synset': 'sausage.n.01', 'synonyms': ['sausage'], 'def': 'highly seasoned minced meat stuffed in casings', 'name': 'sausage'}, {'frequency': 'r', 'id': 931, 'synset': 'sawhorse.n.01', 'synonyms': ['sawhorse', 'sawbuck'], 'def': 'a framework for holding wood that is being sawed', 'name': 'sawhorse'}, {'frequency': 'r', 'id': 932, 'synset': 'sax.n.02', 'synonyms': ['saxophone'], 'def': "a wind instrument with a `J'-shaped form typically made of brass", 'name': 'saxophone'}, {'frequency': 'f', 'id': 933, 'synset': 'scale.n.07', 'synonyms': ['scale_(measuring_instrument)'], 'def': 'a measuring instrument for weighing; shows amount of mass', 'name': 'scale_(measuring_instrument)'}, {'frequency': 'r', 'id': 934, 'synset': 'scarecrow.n.01', 'synonyms': ['scarecrow', 'strawman'], 'def': 'an effigy in the shape of a man to frighten birds away from seeds', 'name': 'scarecrow'}, {'frequency': 'f', 'id': 935, 'synset': 'scarf.n.01', 'synonyms': ['scarf'], 'def': 'a garment worn around the head or neck or shoulders for warmth or decoration', 'name': 'scarf'}, {'frequency': 'c', 'id': 936, 'synset': 'school_bus.n.01', 'synonyms': ['school_bus'], 'def': 'a bus used to transport children to or from school', 'name': 'school_bus'}, 
{'frequency': 'f', 'id': 937, 'synset': 'scissors.n.01', 'synonyms': ['scissors'], 'def': 'a tool having two crossed pivoting blades with looped handles', 'name': 'scissors'}, {'frequency': 'c', 'id': 938, 'synset': 'scoreboard.n.01', 'synonyms': ['scoreboard'], 'def': 'a large board for displaying the score of a contest (and some other information)', 'name': 'scoreboard'}, {'frequency': 'c', 'id': 939, 'synset': 'scrambled_eggs.n.01', 'synonyms': ['scrambled_eggs'], 'def': 'eggs beaten and cooked to a soft firm consistency while stirring', 'name': 'scrambled_eggs'}, {'frequency': 'r', 'id': 940, 'synset': 'scraper.n.01', 'synonyms': ['scraper'], 'def': 'any of various hand tools for scraping', 'name': 'scraper'}, {'frequency': 'r', 'id': 941, 'synset': 'scratcher.n.03', 'synonyms': ['scratcher'], 'def': 'a device used for scratching', 'name': 'scratcher'}, {'frequency': 'c', 'id': 942, 'synset': 'screwdriver.n.01', 'synonyms': ['screwdriver'], 'def': 'a hand tool for driving screws; has a tip that fits into the head of a screw', 'name': 'screwdriver'}, {'frequency': 'c', 'id': 943, 'synset': 'scrub_brush.n.01', 'synonyms': ['scrubbing_brush'], 'def': 'a brush with short stiff bristles for heavy cleaning', 'name': 'scrubbing_brush'}, {'frequency': 'c', 'id': 944, 'synset': 'sculpture.n.01', 'synonyms': ['sculpture'], 'def': 'a three-dimensional work of art', 'name': 'sculpture'}, {'frequency': 'r', 'id': 945, 'synset': 'seabird.n.01', 'synonyms': ['seabird', 'seafowl'], 'def': 'a bird that frequents coastal waters and the open ocean: gulls; pelicans; gannets; cormorants; albatrosses; petrels; etc.', 'name': 'seabird'}, {'frequency': 'r', 'id': 946, 'synset': 'seahorse.n.02', 'synonyms': ['seahorse'], 'def': 'small fish with horse-like heads bent sharply downward and curled tails', 'name': 'seahorse'}, {'frequency': 'r', 'id': 947, 'synset': 'seaplane.n.01', 'synonyms': ['seaplane', 'hydroplane'], 'def': 'an airplane that can land on or take off from water', 'name': 'seaplane'}, {'frequency': 'c', 'id': 948, 'synset': 'seashell.n.01', 'synonyms': ['seashell'], 'def': 'the shell of a marine organism', 'name': 'seashell'}, {'frequency': 'r', 'id': 949, 'synset': 'seedling.n.01', 'synonyms': ['seedling'], 'def': 'young plant or tree grown from a seed', 'name': 'seedling'}, {'frequency': 'c', 'id': 950, 'synset': 'serving_dish.n.01', 'synonyms': ['serving_dish'], 'def': 'a dish used for serving food', 'name': 'serving_dish'}, {'frequency': 'r', 'id': 951, 'synset': 'sewing_machine.n.01', 'synonyms': ['sewing_machine'], 'def': 'a textile machine used as a home appliance for sewing', 'name': 'sewing_machine'}, {'frequency': 'r', 'id': 952, 'synset': 'shaker.n.03', 'synonyms': ['shaker'], 'def': 'a container in which something can be shaken', 'name': 'shaker'}, {'frequency': 'c', 'id': 953, 'synset': 'shampoo.n.01', 'synonyms': ['shampoo'], 'def': 'cleansing agent consisting of soaps or detergents used for washing the hair', 'name': 'shampoo'}, {'frequency': 'r', 'id': 954, 'synset': 'shark.n.01', 'synonyms': ['shark'], 'def': 'typically large carnivorous fishes with sharp teeth', 'name': 'shark'}, {'frequency': 'r', 'id': 955, 'synset': 'sharpener.n.01', 'synonyms': ['sharpener'], 'def': 'any implement that is used to make something (an edge or a point) sharper', 'name': 'sharpener'}, {'frequency': 'r', 'id': 956, 'synset': 'sharpie.n.03', 'synonyms': ['Sharpie'], 'def': 'a pen with indelible ink that will write on any surface', 'name': 'Sharpie'}, {'frequency': 'r', 'id': 957, 'synset': 
'shaver.n.03', 'synonyms': ['shaver_(electric)', 'electric_shaver', 'electric_razor'], 'def': 'a razor powered by an electric motor', 'name': 'shaver_(electric)'}, {'frequency': 'c', 'id': 958, 'synset': 'shaving_cream.n.01', 'synonyms': ['shaving_cream', 'shaving_soap'], 'def': 'toiletry that forms a rich lather for softening the beard before shaving', 'name': 'shaving_cream'}, {'frequency': 'r', 'id': 959, 'synset': 'shawl.n.01', 'synonyms': ['shawl'], 'def': 'cloak consisting of an oblong piece of cloth used to cover the head and shoulders', 'name': 'shawl'}, {'frequency': 'r', 'id': 960, 'synset': 'shears.n.01', 'synonyms': ['shears'], 'def': 'large scissors with strong blades', 'name': 'shears'}, {'frequency': 'f', 'id': 961, 'synset': 'sheep.n.01', 'synonyms': ['sheep'], 'def': 'woolly usually horned ruminant mammal related to the goat', 'name': 'sheep'}, {'frequency': 'r', 'id': 962, 'synset': 'shepherd_dog.n.01', 'synonyms': ['shepherd_dog', 'sheepdog'], 'def': 'any of various usually long-haired breeds of dog reared to herd and guard sheep', 'name': 'shepherd_dog'}, {'frequency': 'r', 'id': 963, 'synset': 'sherbert.n.01', 'synonyms': ['sherbert', 'sherbet'], 'def': 'a frozen dessert made primarily of fruit juice and sugar', 'name': 'sherbert'}, {'frequency': 'r', 'id': 964, 'synset': 'shield.n.02', 'synonyms': ['shield'], 'def': 'armor carried on the arm to intercept blows', 'name': 'shield'}, {'frequency': 'f', 'id': 965, 'synset': 'shirt.n.01', 'synonyms': ['shirt'], 'def': 'a garment worn on the upper half of the body', 'name': 'shirt'}, {'frequency': 'f', 'id': 966, 'synset': 'shoe.n.01', 'synonyms': ['shoe', 'sneaker_(type_of_shoe)', 'tennis_shoe'], 'def': 'common footwear covering the foot', 'name': 'shoe'}, {'frequency': 'c', 'id': 967, 'synset': 'shopping_bag.n.01', 'synonyms': ['shopping_bag'], 'def': 'a bag made of plastic or strong paper (often with handles); used to transport goods after shopping', 'name': 'shopping_bag'}, {'frequency': 'c', 'id': 968, 'synset': 'shopping_cart.n.01', 'synonyms': ['shopping_cart'], 'def': 'a handcart that holds groceries or other goods while shopping', 'name': 'shopping_cart'}, {'frequency': 'f', 'id': 969, 'synset': 'short_pants.n.01', 'synonyms': ['short_pants', 'shorts_(clothing)', 'trunks_(clothing)'], 'def': 'trousers that end at or above the knee', 'name': 'short_pants'}, {'frequency': 'r', 'id': 970, 'synset': 'shot_glass.n.01', 'synonyms': ['shot_glass'], 'def': 'a small glass adequate to hold a single swallow of whiskey', 'name': 'shot_glass'}, {'frequency': 'c', 'id': 971, 'synset': 'shoulder_bag.n.01', 'synonyms': ['shoulder_bag'], 'def': 'a large handbag that can be carried by a strap looped over the shoulder', 'name': 'shoulder_bag'}, {'frequency': 'c', 'id': 972, 'synset': 'shovel.n.01', 'synonyms': ['shovel'], 'def': 'a hand tool for lifting loose material such as snow, dirt, etc.', 'name': 'shovel'}, {'frequency': 'f', 'id': 973, 'synset': 'shower.n.01', 'synonyms': ['shower_head'], 'def': 'a plumbing fixture that sprays water over you', 'name': 'shower_head'}, {'frequency': 'f', 'id': 974, 'synset': 'shower_curtain.n.01', 'synonyms': ['shower_curtain'], 'def': 'a curtain that keeps water from splashing out of the shower area', 'name': 'shower_curtain'}, {'frequency': 'r', 'id': 975, 'synset': 'shredder.n.01', 'synonyms': ['shredder_(for_paper)'], 'def': 'a device that shreds documents', 'name': 'shredder_(for_paper)'}, {'frequency': 'r', 'id': 976, 'synset': 'sieve.n.01', 'synonyms': ['sieve', 
'screen_(sieve)'], 'def': 'a strainer for separating lumps from powdered material or grading particles', 'name': 'sieve'}, {'frequency': 'f', 'id': 977, 'synset': 'signboard.n.01', 'synonyms': ['signboard'], 'def': 'structure displaying a board on which advertisements can be posted', 'name': 'signboard'}, {'frequency': 'c', 'id': 978, 'synset': 'silo.n.01', 'synonyms': ['silo'], 'def': 'a cylindrical tower used for storing goods', 'name': 'silo'}, {'frequency': 'f', 'id': 979, 'synset': 'sink.n.01', 'synonyms': ['sink'], 'def': 'plumbing fixture consisting of a water basin fixed to a wall or floor and having a drainpipe', 'name': 'sink'}, {'frequency': 'f', 'id': 980, 'synset': 'skateboard.n.01', 'synonyms': ['skateboard'], 'def': 'a board with wheels that is ridden in a standing or crouching position and propelled by foot', 'name': 'skateboard'}, {'frequency': 'c', 'id': 981, 'synset': 'skewer.n.01', 'synonyms': ['skewer'], 'def': 'a long pin for holding meat in position while it is being roasted', 'name': 'skewer'}, {'frequency': 'f', 'id': 982, 'synset': 'ski.n.01', 'synonyms': ['ski'], 'def': 'sports equipment for skiing on snow', 'name': 'ski'}, {'frequency': 'f', 'id': 983, 'synset': 'ski_boot.n.01', 'synonyms': ['ski_boot'], 'def': 'a stiff boot that is fastened to a ski with a ski binding', 'name': 'ski_boot'}, {'frequency': 'f', 'id': 984, 'synset': 'ski_parka.n.01', 'synonyms': ['ski_parka', 'ski_jacket'], 'def': 'a parka to be worn while skiing', 'name': 'ski_parka'}, {'frequency': 'f', 'id': 985, 'synset': 'ski_pole.n.01', 'synonyms': ['ski_pole'], 'def': 'a pole with metal points used as an aid in skiing', 'name': 'ski_pole'}, {'frequency': 'f', 'id': 986, 'synset': 'skirt.n.02', 'synonyms': ['skirt'], 'def': 'a garment hanging from the waist; worn mainly by girls and women', 'name': 'skirt'}, {'frequency': 'c', 'id': 987, 'synset': 'sled.n.01', 'synonyms': ['sled', 'sledge', 'sleigh'], 'def': 'a vehicle or flat object for transportation over snow by sliding or pulled by dogs, etc.', 'name': 'sled'}, {'frequency': 'c', 'id': 988, 'synset': 'sleeping_bag.n.01', 'synonyms': ['sleeping_bag'], 'def': 'large padded bag designed to be slept in outdoors', 'name': 'sleeping_bag'}, {'frequency': 'r', 'id': 989, 'synset': 'sling.n.05', 'synonyms': ['sling_(bandage)', 'triangular_bandage'], 'def': 'bandage to support an injured forearm; slung over the shoulder or neck', 'name': 'sling_(bandage)'}, {'frequency': 'c', 'id': 990, 'synset': 'slipper.n.01', 'synonyms': ['slipper_(footwear)', 'carpet_slipper_(footwear)'], 'def': 'low footwear that can be slipped on and off easily; usually worn indoors', 'name': 'slipper_(footwear)'}, {'frequency': 'r', 'id': 991, 'synset': 'smoothie.n.02', 'synonyms': ['smoothie'], 'def': 'a thick smooth drink consisting of fresh fruit pureed with ice cream or yoghurt or milk', 'name': 'smoothie'}, {'frequency': 'r', 'id': 992, 'synset': 'snake.n.01', 'synonyms': ['snake', 'serpent'], 'def': 'limbless scaly elongate reptile; some are venomous', 'name': 'snake'}, {'frequency': 'f', 'id': 993, 'synset': 'snowboard.n.01', 'synonyms': ['snowboard'], 'def': 'a board that resembles a broad ski or a small surfboard; used in a standing position to slide down snow-covered slopes', 'name': 'snowboard'}, {'frequency': 'c', 'id': 994, 'synset': 'snowman.n.01', 'synonyms': ['snowman'], 'def': 'a figure of a person made of packed snow', 'name': 'snowman'}, {'frequency': 'c', 'id': 995, 'synset': 'snowmobile.n.01', 'synonyms': ['snowmobile'], 'def': 'tracked vehicle for 
travel on snow having skis in front', 'name': 'snowmobile'}, {'frequency': 'f', 'id': 996, 'synset': 'soap.n.01', 'synonyms': ['soap'], 'def': 'a cleansing agent made from the salts of vegetable or animal fats', 'name': 'soap'}, {'frequency': 'f', 'id': 997, 'synset': 'soccer_ball.n.01', 'synonyms': ['soccer_ball'], 'def': "an inflated ball used in playing soccer (called `football' outside of the United States)", 'name': 'soccer_ball'}, {'frequency': 'f', 'id': 998, 'synset': 'sock.n.01', 'synonyms': ['sock'], 'def': 'cloth covering for the foot; worn inside the shoe; reaches to between the ankle and the knee', 'name': 'sock'}, {'frequency': 'r', 'id': 999, 'synset': 'soda_fountain.n.02', 'synonyms': ['soda_fountain'], 'def': 'an apparatus for dispensing soda water', 'name': 'soda_fountain'}, {'frequency': 'r', 'id': 1000, 'synset': 'soda_water.n.01', 'synonyms': ['carbonated_water', 'club_soda', 'seltzer', 'sparkling_water'], 'def': 'effervescent beverage artificially charged with carbon dioxide', 'name': 'carbonated_water'}, {'frequency': 'f', 'id': 1001, 'synset': 'sofa.n.01', 'synonyms': ['sofa', 'couch', 'lounge'], 'def': 'an upholstered seat for more than one person', 'name': 'sofa'}, {'frequency': 'r', 'id': 1002, 'synset': 'softball.n.01', 'synonyms': ['softball'], 'def': 'ball used in playing softball', 'name': 'softball'}, {'frequency': 'c', 'id': 1003, 'synset': 'solar_array.n.01', 'synonyms': ['solar_array', 'solar_battery', 'solar_panel'], 'def': 'electrical device consisting of a large array of connected solar cells', 'name': 'solar_array'}, {'frequency': 'r', 'id': 1004, 'synset': 'sombrero.n.02', 'synonyms': ['sombrero'], 'def': 'a straw hat with a tall crown and broad brim; worn in American southwest and in Mexico', 'name': 'sombrero'}, {'frequency': 'c', 'id': 1005, 'synset': 'soup.n.01', 'synonyms': ['soup'], 'def': 'liquid food especially of meat or fish or vegetable stock often containing pieces of solid food', 'name': 'soup'}, {'frequency': 'r', 'id': 1006, 'synset': 'soup_bowl.n.01', 'synonyms': ['soup_bowl'], 'def': 'a bowl for serving soup', 'name': 'soup_bowl'}, {'frequency': 'c', 'id': 1007, 'synset': 'soupspoon.n.01', 'synonyms': ['soupspoon'], 'def': 'a spoon with a rounded bowl for eating soup', 'name': 'soupspoon'}, {'frequency': 'c', 'id': 1008, 'synset': 'sour_cream.n.01', 'synonyms': ['sour_cream', 'soured_cream'], 'def': 'soured light cream', 'name': 'sour_cream'}, {'frequency': 'r', 'id': 1009, 'synset': 'soya_milk.n.01', 'synonyms': ['soya_milk', 'soybean_milk', 'soymilk'], 'def': 'a milk substitute containing soybean flour and water; used in some infant formulas and in making tofu', 'name': 'soya_milk'}, {'frequency': 'r', 'id': 1010, 'synset': 'space_shuttle.n.01', 'synonyms': ['space_shuttle'], 'def': "a reusable spacecraft with wings for a controlled descent through the Earth's atmosphere", 'name': 'space_shuttle'}, {'frequency': 'r', 'id': 1011, 'synset': 'sparkler.n.02', 'synonyms': ['sparkler_(fireworks)'], 'def': 'a firework that burns slowly and throws out a shower of sparks', 'name': 'sparkler_(fireworks)'}, {'frequency': 'f', 'id': 1012, 'synset': 'spatula.n.02', 'synonyms': ['spatula'], 'def': 'a hand tool with a thin flexible blade used to mix or spread soft substances', 'name': 'spatula'}, {'frequency': 'r', 'id': 1013, 'synset': 'spear.n.01', 'synonyms': ['spear', 'lance'], 'def': 'a long pointed rod used as a tool or weapon', 'name': 'spear'}, {'frequency': 'f', 'id': 1014, 'synset': 'spectacles.n.01', 'synonyms': ['spectacles', 'specs', 
'eyeglasses', 'glasses'], 'def': 'optical instrument consisting of a frame that holds a pair of lenses for correcting defective vision', 'name': 'spectacles'}, {'frequency': 'c', 'id': 1015, 'synset': 'spice_rack.n.01', 'synonyms': ['spice_rack'], 'def': 'a rack for displaying containers filled with spices', 'name': 'spice_rack'}, {'frequency': 'r', 'id': 1016, 'synset': 'spider.n.01', 'synonyms': ['spider'], 'def': 'predatory arachnid with eight legs, two poison fangs, two feelers, and usually two silk-spinning organs at the back end of the body', 'name': 'spider'}, {'frequency': 'c', 'id': 1017, 'synset': 'sponge.n.01', 'synonyms': ['sponge'], 'def': 'a porous mass usable to absorb water typically used for cleaning', 'name': 'sponge'}, {'frequency': 'f', 'id': 1018, 'synset': 'spoon.n.01', 'synonyms': ['spoon'], 'def': 'a piece of cutlery with a shallow bowl-shaped container and a handle', 'name': 'spoon'}, {'frequency': 'c', 'id': 1019, 'synset': 'sportswear.n.01', 'synonyms': ['sportswear', 'athletic_wear', 'activewear'], 'def': 'attire worn for sport or for casual wear', 'name': 'sportswear'}, {'frequency': 'c', 'id': 1020, 'synset': 'spotlight.n.02', 'synonyms': ['spotlight'], 'def': 'a lamp that produces a strong beam of light to illuminate a restricted area; used to focus attention of a stage performer', 'name': 'spotlight'}, {'frequency': 'r', 'id': 1021, 'synset': 'squirrel.n.01', 'synonyms': ['squirrel'], 'def': 'a kind of arboreal rodent having a long bushy tail', 'name': 'squirrel'}, {'frequency': 'c', 'id': 1022, 'synset': 'stapler.n.01', 'synonyms': ['stapler_(stapling_machine)'], 'def': 'a machine that inserts staples into sheets of paper in order to fasten them together', 'name': 'stapler_(stapling_machine)'}, {'frequency': 'r', 'id': 1023, 'synset': 'starfish.n.01', 'synonyms': ['starfish', 'sea_star'], 'def': 'echinoderms characterized by five arms extending from a central disk', 'name': 'starfish'}, {'frequency': 'f', 'id': 1024, 'synset': 'statue.n.01', 'synonyms': ['statue_(sculpture)'], 'def': 'a sculpture representing a human or animal', 'name': 'statue_(sculpture)'}, {'frequency': 'c', 'id': 1025, 'synset': 'steak.n.01', 'synonyms': ['steak_(food)'], 'def': 'a slice of meat cut from the fleshy part of an animal or large fish', 'name': 'steak_(food)'}, {'frequency': 'r', 'id': 1026, 'synset': 'steak_knife.n.01', 'synonyms': ['steak_knife'], 'def': 'a sharp table knife used in eating steak', 'name': 'steak_knife'}, {'frequency': 'r', 'id': 1027, 'synset': 'steamer.n.02', 'synonyms': ['steamer_(kitchen_appliance)'], 'def': 'a cooking utensil that can be used to cook food by steaming it', 'name': 'steamer_(kitchen_appliance)'}, {'frequency': 'f', 'id': 1028, 'synset': 'steering_wheel.n.01', 'synonyms': ['steering_wheel'], 'def': 'a handwheel that is used for steering', 'name': 'steering_wheel'}, {'frequency': 'r', 'id': 1029, 'synset': 'stencil.n.01', 'synonyms': ['stencil'], 'def': 'a sheet of material (metal, plastic, etc.) 
that has been perforated with a pattern; ink or paint can pass through the perforations to create the printed pattern on the surface below', 'name': 'stencil'}, {'frequency': 'r', 'id': 1030, 'synset': 'step_ladder.n.01', 'synonyms': ['stepladder'], 'def': 'a folding portable ladder hinged at the top', 'name': 'stepladder'}, {'frequency': 'c', 'id': 1031, 'synset': 'step_stool.n.01', 'synonyms': ['step_stool'], 'def': 'a stool that has one or two steps that fold under the seat', 'name': 'step_stool'}, {'frequency': 'c', 'id': 1032, 'synset': 'stereo.n.01', 'synonyms': ['stereo_(sound_system)'], 'def': 'electronic device for playing audio', 'name': 'stereo_(sound_system)'}, {'frequency': 'r', 'id': 1033, 'synset': 'stew.n.02', 'synonyms': ['stew'], 'def': 'food prepared by stewing especially meat or fish with vegetables', 'name': 'stew'}, {'frequency': 'r', 'id': 1034, 'synset': 'stirrer.n.02', 'synonyms': ['stirrer'], 'def': 'an implement used for stirring', 'name': 'stirrer'}, {'frequency': 'f', 'id': 1035, 'synset': 'stirrup.n.01', 'synonyms': ['stirrup'], 'def': "support consisting of metal loops into which rider's feet go", 'name': 'stirrup'}, {'frequency': 'c', 'id': 1036, 'synset': 'stocking.n.01', 'synonyms': ['stockings_(leg_wear)'], 'def': 'close-fitting hosiery to cover the foot and leg; come in matched pairs', 'name': 'stockings_(leg_wear)'}, {'frequency': 'f', 'id': 1037, 'synset': 'stool.n.01', 'synonyms': ['stool'], 'def': 'a simple seat without a back or arms', 'name': 'stool'}, {'frequency': 'f', 'id': 1038, 'synset': 'stop_sign.n.01', 'synonyms': ['stop_sign'], 'def': 'a traffic sign to notify drivers that they must come to a complete stop', 'name': 'stop_sign'}, {'frequency': 'f', 'id': 1039, 'synset': 'stoplight.n.01', 'synonyms': ['brake_light'], 'def': 'a red light on the rear of a motor vehicle that signals when the brakes are applied', 'name': 'brake_light'}, {'frequency': 'f', 'id': 1040, 'synset': 'stove.n.01', 'synonyms': ['stove', 'kitchen_stove', 'range_(kitchen_appliance)', 'kitchen_range', 'cooking_stove'], 'def': 'a kitchen appliance used for cooking food', 'name': 'stove'}, {'frequency': 'c', 'id': 1041, 'synset': 'strainer.n.01', 'synonyms': ['strainer'], 'def': 'a filter to retain larger pieces while smaller pieces and liquids pass through', 'name': 'strainer'}, {'frequency': 'f', 'id': 1042, 'synset': 'strap.n.01', 'synonyms': ['strap'], 'def': 'an elongated strip of material for binding things together or holding', 'name': 'strap'}, {'frequency': 'f', 'id': 1043, 'synset': 'straw.n.04', 'synonyms': ['straw_(for_drinking)', 'drinking_straw'], 'def': 'a thin paper or plastic tube used to suck liquids into the mouth', 'name': 'straw_(for_drinking)'}, {'frequency': 'f', 'id': 1044, 'synset': 'strawberry.n.01', 'synonyms': ['strawberry'], 'def': 'sweet fleshy red fruit', 'name': 'strawberry'}, {'frequency': 'f', 'id': 1045, 'synset': 'street_sign.n.01', 'synonyms': ['street_sign'], 'def': 'a sign visible from the street', 'name': 'street_sign'}, {'frequency': 'f', 'id': 1046, 'synset': 'streetlight.n.01', 'synonyms': ['streetlight', 'street_lamp'], 'def': 'a lamp supported on a lamppost; for illuminating a street', 'name': 'streetlight'}, {'frequency': 'r', 'id': 1047, 'synset': 'string_cheese.n.01', 'synonyms': ['string_cheese'], 'def': 'cheese formed in long strings twisted together', 'name': 'string_cheese'}, {'frequency': 'r', 'id': 1048, 'synset': 'stylus.n.02', 'synonyms': ['stylus'], 'def': 'a pointed tool for writing or drawing or engraving', 'name': 
'stylus'}, {'frequency': 'r', 'id': 1049, 'synset': 'subwoofer.n.01', 'synonyms': ['subwoofer'], 'def': 'a loudspeaker that is designed to reproduce very low bass frequencies', 'name': 'subwoofer'}, {'frequency': 'r', 'id': 1050, 'synset': 'sugar_bowl.n.01', 'synonyms': ['sugar_bowl'], 'def': 'a dish in which sugar is served', 'name': 'sugar_bowl'}, {'frequency': 'r', 'id': 1051, 'synset': 'sugarcane.n.01', 'synonyms': ['sugarcane_(plant)'], 'def': 'juicy canes whose sap is a source of molasses and commercial sugar; fresh canes are sometimes chewed for the juice', 'name': 'sugarcane_(plant)'}, {'frequency': 'c', 'id': 1052, 'synset': 'suit.n.01', 'synonyms': ['suit_(clothing)'], 'def': 'a set of garments (usually including a jacket and trousers or skirt) for outerwear all of the same fabric and color', 'name': 'suit_(clothing)'}, {'frequency': 'c', 'id': 1053, 'synset': 'sunflower.n.01', 'synonyms': ['sunflower'], 'def': 'any plant of the genus Helianthus having large flower heads with dark disk florets and showy yellow rays', 'name': 'sunflower'}, {'frequency': 'f', 'id': 1054, 'synset': 'sunglasses.n.01', 'synonyms': ['sunglasses'], 'def': 'spectacles that are darkened or polarized to protect the eyes from the glare of the sun', 'name': 'sunglasses'}, {'frequency': 'c', 'id': 1055, 'synset': 'sunhat.n.01', 'synonyms': ['sunhat'], 'def': 'a hat with a broad brim that protects the face from direct exposure to the sun', 'name': 'sunhat'}, {'frequency': 'r', 'id': 1056, 'synset': 'sunscreen.n.01', 'synonyms': ['sunscreen', 'sunblock'], 'def': 'a cream spread on the skin; contains a chemical to filter out ultraviolet light and so protect from sunburn', 'name': 'sunscreen'}, {'frequency': 'f', 'id': 1057, 'synset': 'surfboard.n.01', 'synonyms': ['surfboard'], 'def': 'a narrow buoyant board for riding surf', 'name': 'surfboard'}, {'frequency': 'c', 'id': 1058, 'synset': 'sushi.n.01', 'synonyms': ['sushi'], 'def': 'rice (with raw fish) wrapped in seaweed', 'name': 'sushi'}, {'frequency': 'c', 'id': 1059, 'synset': 'swab.n.02', 'synonyms': ['mop'], 'def': 'cleaning implement consisting of absorbent material fastened to a handle; for cleaning floors', 'name': 'mop'}, {'frequency': 'c', 'id': 1060, 'synset': 'sweat_pants.n.01', 'synonyms': ['sweat_pants'], 'def': 'loose-fitting trousers with elastic cuffs; worn by athletes', 'name': 'sweat_pants'}, {'frequency': 'c', 'id': 1061, 'synset': 'sweatband.n.02', 'synonyms': ['sweatband'], 'def': 'a band of material tied around the forehead or wrist to absorb sweat', 'name': 'sweatband'}, {'frequency': 'f', 'id': 1062, 'synset': 'sweater.n.01', 'synonyms': ['sweater'], 'def': 'a crocheted or knitted garment covering the upper part of the body', 'name': 'sweater'}, {'frequency': 'f', 'id': 1063, 'synset': 'sweatshirt.n.01', 'synonyms': ['sweatshirt'], 'def': 'cotton knit pullover with long sleeves worn during athletic activity', 'name': 'sweatshirt'}, {'frequency': 'c', 'id': 1064, 'synset': 'sweet_potato.n.02', 'synonyms': ['sweet_potato'], 'def': 'the edible tuberous root of the sweet potato vine', 'name': 'sweet_potato'}, {'frequency': 'f', 'id': 1065, 'synset': 'swimsuit.n.01', 'synonyms': ['swimsuit', 'swimwear', 'bathing_suit', 'swimming_costume', 'bathing_costume', 'swimming_trunks', 'bathing_trunks'], 'def': 'garment worn for swimming', 'name': 'swimsuit'}, {'frequency': 'c', 'id': 1066, 'synset': 'sword.n.01', 'synonyms': ['sword'], 'def': 'a cutting or thrusting weapon that has a long metal blade', 'name': 'sword'}, {'frequency': 'r', 'id': 1067, 
'synset': 'syringe.n.01', 'synonyms': ['syringe'], 'def': 'a medical instrument used to inject or withdraw fluids', 'name': 'syringe'}, {'frequency': 'r', 'id': 1068, 'synset': 'tabasco.n.02', 'synonyms': ['Tabasco_sauce'], 'def': 'very spicy sauce (trade name Tabasco) made from fully-aged red peppers', 'name': 'Tabasco_sauce'}, {'frequency': 'r', 'id': 1069, 'synset': 'table-tennis_table.n.01', 'synonyms': ['table-tennis_table', 'ping-pong_table'], 'def': 'a table used for playing table tennis', 'name': 'table-tennis_table'}, {'frequency': 'f', 'id': 1070, 'synset': 'table.n.02', 'synonyms': ['table'], 'def': 'a piece of furniture having a smooth flat top that is usually supported by one or more vertical legs', 'name': 'table'}, {'frequency': 'c', 'id': 1071, 'synset': 'table_lamp.n.01', 'synonyms': ['table_lamp'], 'def': 'a lamp that sits on a table', 'name': 'table_lamp'}, {'frequency': 'f', 'id': 1072, 'synset': 'tablecloth.n.01', 'synonyms': ['tablecloth'], 'def': 'a covering spread over a dining table', 'name': 'tablecloth'}, {'frequency': 'r', 'id': 1073, 'synset': 'tachometer.n.01', 'synonyms': ['tachometer'], 'def': 'measuring instrument for indicating speed of rotation', 'name': 'tachometer'}, {'frequency': 'r', 'id': 1074, 'synset': 'taco.n.02', 'synonyms': ['taco'], 'def': 'a small tortilla cupped around a filling', 'name': 'taco'}, {'frequency': 'f', 'id': 1075, 'synset': 'tag.n.02', 'synonyms': ['tag'], 'def': 'a label associated with something for the purpose of identification or information', 'name': 'tag'}, {'frequency': 'f', 'id': 1076, 'synset': 'taillight.n.01', 'synonyms': ['taillight', 'rear_light'], 'def': 'lamp (usually red) mounted at the rear of a motor vehicle', 'name': 'taillight'}, {'frequency': 'r', 'id': 1077, 'synset': 'tambourine.n.01', 'synonyms': ['tambourine'], 'def': 'a shallow drum with a single drumhead and with metallic disks in the sides', 'name': 'tambourine'}, {'frequency': 'r', 'id': 1078, 'synset': 'tank.n.01', 'synonyms': ['army_tank', 'armored_combat_vehicle', 'armoured_combat_vehicle'], 'def': 'an enclosed armored military vehicle; has a cannon and moves on caterpillar treads', 'name': 'army_tank'}, {'frequency': 'c', 'id': 1079, 'synset': 'tank.n.02', 'synonyms': ['tank_(storage_vessel)', 'storage_tank'], 'def': 'a large (usually metallic) vessel for holding gases or liquids', 'name': 'tank_(storage_vessel)'}, {'frequency': 'f', 'id': 1080, 'synset': 'tank_top.n.01', 'synonyms': ['tank_top_(clothing)'], 'def': 'a tight-fitting sleeveless shirt with wide shoulder straps and low neck and no front opening', 'name': 'tank_top_(clothing)'}, {'frequency': 'c', 'id': 1081, 'synset': 'tape.n.01', 'synonyms': ['tape_(sticky_cloth_or_paper)'], 'def': 'a long thin piece of cloth or paper as used for binding or fastening', 'name': 'tape_(sticky_cloth_or_paper)'}, {'frequency': 'c', 'id': 1082, 'synset': 'tape.n.04', 'synonyms': ['tape_measure', 'measuring_tape'], 'def': 'measuring instrument consisting of a narrow strip (cloth or metal) marked in inches or centimeters and used for measuring lengths', 'name': 'tape_measure'}, {'frequency': 'c', 'id': 1083, 'synset': 'tapestry.n.02', 'synonyms': ['tapestry'], 'def': 'a heavy textile with a woven design; used for curtains and upholstery', 'name': 'tapestry'}, {'frequency': 'f', 'id': 1084, 'synset': 'tarpaulin.n.01', 'synonyms': ['tarp'], 'def': 'waterproofed canvas', 'name': 'tarp'}, {'frequency': 'c', 'id': 1085, 'synset': 'tartan.n.01', 'synonyms': ['tartan', 'plaid'], 'def': 'a cloth having a 
crisscross design', 'name': 'tartan'}, {'frequency': 'c', 'id': 1086, 'synset': 'tassel.n.01', 'synonyms': ['tassel'], 'def': 'adornment consisting of a bunch of cords fastened at one end', 'name': 'tassel'}, {'frequency': 'r', 'id': 1087, 'synset': 'tea_bag.n.01', 'synonyms': ['tea_bag'], 'def': 'a measured amount of tea in a bag for an individual serving of tea', 'name': 'tea_bag'}, {'frequency': 'c', 'id': 1088, 'synset': 'teacup.n.02', 'synonyms': ['teacup'], 'def': 'a cup from which tea is drunk', 'name': 'teacup'}, {'frequency': 'c', 'id': 1089, 'synset': 'teakettle.n.01', 'synonyms': ['teakettle'], 'def': 'kettle for boiling water to make tea', 'name': 'teakettle'}, {'frequency': 'c', 'id': 1090, 'synset': 'teapot.n.01', 'synonyms': ['teapot'], 'def': 'pot for brewing tea; usually has a spout and handle', 'name': 'teapot'}, {'frequency': 'f', 'id': 1091, 'synset': 'teddy.n.01', 'synonyms': ['teddy_bear'], 'def': "plaything consisting of a child's toy bear (usually plush and stuffed with soft materials)", 'name': 'teddy_bear'}, {'frequency': 'f', 'id': 1092, 'synset': 'telephone.n.01', 'synonyms': ['telephone', 'phone', 'telephone_set'], 'def': 'electronic device for communicating by voice over long distances', 'name': 'telephone'}, {'frequency': 'c', 'id': 1093, 'synset': 'telephone_booth.n.01', 'synonyms': ['telephone_booth', 'phone_booth', 'call_box', 'telephone_box', 'telephone_kiosk'], 'def': 'booth for using a telephone', 'name': 'telephone_booth'}, {'frequency': 'f', 'id': 1094, 'synset': 'telephone_pole.n.01', 'synonyms': ['telephone_pole', 'telegraph_pole', 'telegraph_post'], 'def': 'tall pole supporting telephone wires', 'name': 'telephone_pole'}, {'frequency': 'r', 'id': 1095, 'synset': 'telephoto_lens.n.01', 'synonyms': ['telephoto_lens', 'zoom_lens'], 'def': 'a camera lens that magnifies the image', 'name': 'telephoto_lens'}, {'frequency': 'c', 'id': 1096, 'synset': 'television_camera.n.01', 'synonyms': ['television_camera', 'tv_camera'], 'def': 'television equipment for capturing and recording video', 'name': 'television_camera'}, {'frequency': 'f', 'id': 1097, 'synset': 'television_receiver.n.01', 'synonyms': ['television_set', 'tv', 'tv_set'], 'def': 'an electronic device that receives television signals and displays them on a screen', 'name': 'television_set'}, {'frequency': 'f', 'id': 1098, 'synset': 'tennis_ball.n.01', 'synonyms': ['tennis_ball'], 'def': 'ball about the size of a fist used in playing tennis', 'name': 'tennis_ball'}, {'frequency': 'f', 'id': 1099, 'synset': 'tennis_racket.n.01', 'synonyms': ['tennis_racket'], 'def': 'a racket used to play tennis', 'name': 'tennis_racket'}, {'frequency': 'r', 'id': 1100, 'synset': 'tequila.n.01', 'synonyms': ['tequila'], 'def': 'Mexican liquor made from fermented juices of an agave plant', 'name': 'tequila'}, {'frequency': 'c', 'id': 1101, 'synset': 'thermometer.n.01', 'synonyms': ['thermometer'], 'def': 'measuring instrument for measuring temperature', 'name': 'thermometer'}, {'frequency': 'c', 'id': 1102, 'synset': 'thermos.n.01', 'synonyms': ['thermos_bottle'], 'def': 'vacuum flask that preserves temperature of hot or cold drinks', 'name': 'thermos_bottle'}, {'frequency': 'c', 'id': 1103, 'synset': 'thermostat.n.01', 'synonyms': ['thermostat'], 'def': 'a regulator for automatically regulating temperature by starting or stopping the supply of heat', 'name': 'thermostat'}, {'frequency': 'r', 'id': 1104, 'synset': 'thimble.n.02', 'synonyms': ['thimble'], 'def': 'a small metal cap to protect the finger while sewing; 
can be used as a small container', 'name': 'thimble'}, {'frequency': 'c', 'id': 1105, 'synset': 'thread.n.01', 'synonyms': ['thread', 'yarn'], 'def': 'a fine cord of twisted fibers (of cotton or silk or wool or nylon etc.) used in sewing and weaving', 'name': 'thread'}, {'frequency': 'c', 'id': 1106, 'synset': 'thumbtack.n.01', 'synonyms': ['thumbtack', 'drawing_pin', 'pushpin'], 'def': 'a tack for attaching papers to a bulletin board or drawing board', 'name': 'thumbtack'}, {'frequency': 'c', 'id': 1107, 'synset': 'tiara.n.01', 'synonyms': ['tiara'], 'def': 'a jeweled headdress worn by women on formal occasions', 'name': 'tiara'}, {'frequency': 'c', 'id': 1108, 'synset': 'tiger.n.02', 'synonyms': ['tiger'], 'def': 'large feline of forests in most of Asia having a tawny coat with black stripes', 'name': 'tiger'}, {'frequency': 'c', 'id': 1109, 'synset': 'tights.n.01', 'synonyms': ['tights_(clothing)', 'leotards'], 'def': 'skintight knit hose covering the body from the waist to the feet worn by acrobats and dancers and as stockings by women and girls', 'name': 'tights_(clothing)'}, {'frequency': 'c', 'id': 1110, 'synset': 'timer.n.01', 'synonyms': ['timer', 'stopwatch'], 'def': 'a timepiece that measures a time interval and signals its end', 'name': 'timer'}, {'frequency': 'f', 'id': 1111, 'synset': 'tinfoil.n.01', 'synonyms': ['tinfoil'], 'def': 'foil made of tin or an alloy of tin and lead', 'name': 'tinfoil'}, {'frequency': 'r', 'id': 1112, 'synset': 'tinsel.n.01', 'synonyms': ['tinsel'], 'def': 'a showy decoration that is basically valueless', 'name': 'tinsel'}, {'frequency': 'f', 'id': 1113, 'synset': 'tissue.n.02', 'synonyms': ['tissue_paper'], 'def': 'a soft thin (usually translucent) paper', 'name': 'tissue_paper'}, {'frequency': 'c', 'id': 1114, 'synset': 'toast.n.01', 'synonyms': ['toast_(food)'], 'def': 'slice of bread that has been toasted', 'name': 'toast_(food)'}, {'frequency': 'f', 'id': 1115, 'synset': 'toaster.n.02', 'synonyms': ['toaster'], 'def': 'a kitchen appliance (usually electric) for toasting bread', 'name': 'toaster'}, {'frequency': 'c', 'id': 1116, 'synset': 'toaster_oven.n.01', 'synonyms': ['toaster_oven'], 'def': 'kitchen appliance consisting of a small electric oven for toasting or warming food', 'name': 'toaster_oven'}, {'frequency': 'f', 'id': 1117, 'synset': 'toilet.n.02', 'synonyms': ['toilet'], 'def': 'a plumbing fixture for defecation and urination', 'name': 'toilet'}, {'frequency': 'f', 'id': 1118, 'synset': 'toilet_tissue.n.01', 'synonyms': ['toilet_tissue', 'toilet_paper', 'bathroom_tissue'], 'def': 'a soft thin absorbent paper for use in toilets', 'name': 'toilet_tissue'}, {'frequency': 'f', 'id': 1119, 'synset': 'tomato.n.01', 'synonyms': ['tomato'], 'def': 'mildly acid red or yellow pulpy fruit eaten as a vegetable', 'name': 'tomato'}, {'frequency': 'c', 'id': 1120, 'synset': 'tongs.n.01', 'synonyms': ['tongs'], 'def': 'any of various devices for taking hold of objects; usually have two hinged legs with handles above and pointed hooks below', 'name': 'tongs'}, {'frequency': 'c', 'id': 1121, 'synset': 'toolbox.n.01', 'synonyms': ['toolbox'], 'def': 'a box or chest or cabinet for holding hand tools', 'name': 'toolbox'}, {'frequency': 'f', 'id': 1122, 'synset': 'toothbrush.n.01', 'synonyms': ['toothbrush'], 'def': 'small brush; has long handle; used to clean teeth', 'name': 'toothbrush'}, {'frequency': 'f', 'id': 1123, 'synset': 'toothpaste.n.01', 'synonyms': ['toothpaste'], 'def': 'a dentifrice in the form of a paste', 'name': 'toothpaste'}, 
{'frequency': 'c', 'id': 1124, 'synset': 'toothpick.n.01', 'synonyms': ['toothpick'], 'def': 'pick consisting of a small strip of wood or plastic; used to pick food from between the teeth', 'name': 'toothpick'}, {'frequency': 'c', 'id': 1125, 'synset': 'top.n.09', 'synonyms': ['cover'], 'def': 'covering for a hole (especially a hole in the top of a container)', 'name': 'cover'}, {'frequency': 'c', 'id': 1126, 'synset': 'tortilla.n.01', 'synonyms': ['tortilla'], 'def': 'thin unleavened pancake made from cornmeal or wheat flour', 'name': 'tortilla'}, {'frequency': 'c', 'id': 1127, 'synset': 'tow_truck.n.01', 'synonyms': ['tow_truck'], 'def': 'a truck equipped to hoist and pull wrecked cars (or to remove cars from no-parking zones)', 'name': 'tow_truck'}, {'frequency': 'f', 'id': 1128, 'synset': 'towel.n.01', 'synonyms': ['towel'], 'def': 'a rectangular piece of absorbent cloth (or paper) for drying or wiping', 'name': 'towel'}, {'frequency': 'f', 'id': 1129, 'synset': 'towel_rack.n.01', 'synonyms': ['towel_rack', 'towel_rail', 'towel_bar'], 'def': 'a rack consisting of one or more bars on which towels can be hung', 'name': 'towel_rack'}, {'frequency': 'f', 'id': 1130, 'synset': 'toy.n.03', 'synonyms': ['toy'], 'def': 'a device regarded as providing amusement', 'name': 'toy'}, {'frequency': 'c', 'id': 1131, 'synset': 'tractor.n.01', 'synonyms': ['tractor_(farm_equipment)'], 'def': 'a wheeled vehicle with large wheels; used in farming and other applications', 'name': 'tractor_(farm_equipment)'}, {'frequency': 'f', 'id': 1132, 'synset': 'traffic_light.n.01', 'synonyms': ['traffic_light'], 'def': 'a device to control vehicle traffic often consisting of three or more lights', 'name': 'traffic_light'}, {'frequency': 'r', 'id': 1133, 'synset': 'trail_bike.n.01', 'synonyms': ['dirt_bike'], 'def': 'a lightweight motorcycle equipped with rugged tires and suspension for off-road use', 'name': 'dirt_bike'}, {'frequency': 'c', 'id': 1134, 'synset': 'trailer_truck.n.01', 'synonyms': ['trailer_truck', 'tractor_trailer', 'trucking_rig', 'articulated_lorry', 'semi_truck'], 'def': 'a truck consisting of a tractor and trailer together', 'name': 'trailer_truck'}, {'frequency': 'f', 'id': 1135, 'synset': 'train.n.01', 'synonyms': ['train_(railroad_vehicle)', 'railroad_train'], 'def': 'public or private transport provided by a line of railway cars coupled together and drawn by a locomotive', 'name': 'train_(railroad_vehicle)'}, {'frequency': 'r', 'id': 1136, 'synset': 'trampoline.n.01', 'synonyms': ['trampoline'], 'def': 'gymnastic apparatus consisting of a strong canvas sheet attached with springs to a metal frame', 'name': 'trampoline'}, {'frequency': 'f', 'id': 1137, 'synset': 'tray.n.01', 'synonyms': ['tray'], 'def': 'an open receptacle for holding or displaying or serving articles or food', 'name': 'tray'}, {'frequency': 'r', 'id': 1138, 'synset': 'tree_house.n.01', 'synonyms': ['tree_house'], 'def': '(NOT A TREE) a PLAYHOUSE built in the branches of a tree', 'name': 'tree_house'}, {'frequency': 'r', 'id': 1139, 'synset': 'trench_coat.n.01', 'synonyms': ['trench_coat'], 'def': 'a military style raincoat; belted with deep pockets', 'name': 'trench_coat'}, {'frequency': 'r', 'id': 1140, 'synset': 'triangle.n.05', 'synonyms': ['triangle_(musical_instrument)'], 'def': 'a percussion instrument consisting of a metal bar bent in the shape of an open triangle', 'name': 'triangle_(musical_instrument)'}, {'frequency': 'r', 'id': 1141, 'synset': 'tricycle.n.01', 'synonyms': ['tricycle'], 'def': 'a vehicle with three 
wheels that is moved by foot pedals', 'name': 'tricycle'}, {'frequency': 'c', 'id': 1142, 'synset': 'tripod.n.01', 'synonyms': ['tripod'], 'def': 'a three-legged rack used for support', 'name': 'tripod'}, {'frequency': 'f', 'id': 1143, 'synset': 'trouser.n.01', 'synonyms': ['trousers', 'pants_(clothing)'], 'def': 'a garment extending from the waist to the knee or ankle, covering each leg separately', 'name': 'trousers'}, {'frequency': 'f', 'id': 1144, 'synset': 'truck.n.01', 'synonyms': ['truck'], 'def': 'an automotive vehicle suitable for hauling', 'name': 'truck'}, {'frequency': 'r', 'id': 1145, 'synset': 'truffle.n.03', 'synonyms': ['truffle_(chocolate)', 'chocolate_truffle'], 'def': 'creamy chocolate candy', 'name': 'truffle_(chocolate)'}, {'frequency': 'c', 'id': 1146, 'synset': 'trunk.n.02', 'synonyms': ['trunk'], 'def': 'luggage consisting of a large strong case used when traveling or for storage', 'name': 'trunk'}, {'frequency': 'r', 'id': 1147, 'synset': 'tub.n.02', 'synonyms': ['vat'], 'def': 'a large open vessel for holding or storing liquids', 'name': 'vat'}, {'frequency': 'c', 'id': 1148, 'synset': 'turban.n.01', 'synonyms': ['turban'], 'def': 'a traditional headdress consisting of a long scarf wrapped around the head', 'name': 'turban'}, {'frequency': 'r', 'id': 1149, 'synset': 'turkey.n.01', 'synonyms': ['turkey_(bird)'], 'def': 'large gallinaceous bird with fan-shaped tail; widely domesticated for food', 'name': 'turkey_(bird)'}, {'frequency': 'c', 'id': 1150, 'synset': 'turkey.n.04', 'synonyms': ['turkey_(food)'], 'def': 'flesh of large domesticated fowl usually roasted', 'name': 'turkey_(food)'}, {'frequency': 'r', 'id': 1151, 'synset': 'turnip.n.01', 'synonyms': ['turnip'], 'def': 'widely cultivated plant having a large fleshy edible white or yellow root', 'name': 'turnip'}, {'frequency': 'c', 'id': 1152, 'synset': 'turtle.n.02', 'synonyms': ['turtle'], 'def': 'any of various aquatic and land reptiles having a bony shell and flipper-like limbs for swimming', 'name': 'turtle'}, {'frequency': 'r', 'id': 1153, 'synset': 'turtleneck.n.01', 'synonyms': ['turtleneck_(clothing)', 'polo-neck'], 'def': 'a sweater or jersey with a high close-fitting collar', 'name': 'turtleneck_(clothing)'}, {'frequency': 'r', 'id': 1154, 'synset': 'typewriter.n.01', 'synonyms': ['typewriter'], 'def': 'hand-operated character printer for printing written messages one character at a time', 'name': 'typewriter'}, {'frequency': 'f', 'id': 1155, 'synset': 'umbrella.n.01', 'synonyms': ['umbrella'], 'def': 'a lightweight handheld collapsible canopy', 'name': 'umbrella'}, {'frequency': 'c', 'id': 1156, 'synset': 'underwear.n.01', 'synonyms': ['underwear', 'underclothes', 'underclothing', 'underpants'], 'def': 'undergarment worn next to the skin and under the outer garments', 'name': 'underwear'}, {'frequency': 'r', 'id': 1157, 'synset': 'unicycle.n.01', 'synonyms': ['unicycle'], 'def': 'a vehicle with a single wheel that is driven by pedals', 'name': 'unicycle'}, {'frequency': 'c', 'id': 1158, 'synset': 'urinal.n.01', 'synonyms': ['urinal'], 'def': 'a plumbing fixture (usually attached to the wall) used by men to urinate', 'name': 'urinal'}, {'frequency': 'r', 'id': 1159, 'synset': 'urn.n.01', 'synonyms': ['urn'], 'def': 'a large vase that usually has a pedestal or feet', 'name': 'urn'}, {'frequency': 'c', 'id': 1160, 'synset': 'vacuum.n.04', 'synonyms': ['vacuum_cleaner'], 'def': 'an electrical home appliance that cleans by suction', 'name': 'vacuum_cleaner'}, {'frequency': 'c', 'id': 1161, 'synset': 
'valve.n.03', 'synonyms': ['valve'], 'def': 'control consisting of a mechanical device for controlling the flow of a fluid', 'name': 'valve'}, {'frequency': 'f', 'id': 1162, 'synset': 'vase.n.01', 'synonyms': ['vase'], 'def': 'an open jar of glass or porcelain used as an ornament or to hold flowers', 'name': 'vase'}, {'frequency': 'c', 'id': 1163, 'synset': 'vending_machine.n.01', 'synonyms': ['vending_machine'], 'def': 'a slot machine for selling goods', 'name': 'vending_machine'}, {'frequency': 'f', 'id': 1164, 'synset': 'vent.n.01', 'synonyms': ['vent', 'blowhole', 'air_vent'], 'def': 'a hole for the escape of gas or air', 'name': 'vent'}, {'frequency': 'c', 'id': 1165, 'synset': 'videotape.n.01', 'synonyms': ['videotape'], 'def': 'a video recording made on magnetic tape', 'name': 'videotape'}, {'frequency': 'r', 'id': 1166, 'synset': 'vinegar.n.01', 'synonyms': ['vinegar'], 'def': 'sour-tasting liquid produced usually by oxidation of the alcohol in wine or cider and used as a condiment or food preservative', 'name': 'vinegar'}, {'frequency': 'r', 'id': 1167, 'synset': 'violin.n.01', 'synonyms': ['violin', 'fiddle'], 'def': 'bowed stringed instrument that is the highest member of the violin family', 'name': 'violin'}, {'frequency': 'r', 'id': 1168, 'synset': 'vodka.n.01', 'synonyms': ['vodka'], 'def': 'unaged colorless liquor originating in Russia', 'name': 'vodka'}, {'frequency': 'r', 'id': 1169, 'synset': 'volleyball.n.02', 'synonyms': ['volleyball'], 'def': 'an inflated ball used in playing volleyball', 'name': 'volleyball'}, {'frequency': 'r', 'id': 1170, 'synset': 'vulture.n.01', 'synonyms': ['vulture'], 'def': 'any of various large birds of prey having naked heads and weak claws and feeding chiefly on carrion', 'name': 'vulture'}, {'frequency': 'c', 'id': 1171, 'synset': 'waffle.n.01', 'synonyms': ['waffle'], 'def': 'pancake batter baked in a waffle iron', 'name': 'waffle'}, {'frequency': 'r', 'id': 1172, 'synset': 'waffle_iron.n.01', 'synonyms': ['waffle_iron'], 'def': 'a kitchen appliance for baking waffles', 'name': 'waffle_iron'}, {'frequency': 'c', 'id': 1173, 'synset': 'wagon.n.01', 'synonyms': ['wagon'], 'def': 'any of various kinds of wheeled vehicles drawn by an animal or a tractor', 'name': 'wagon'}, {'frequency': 'c', 'id': 1174, 'synset': 'wagon_wheel.n.01', 'synonyms': ['wagon_wheel'], 'def': 'a wheel of a wagon', 'name': 'wagon_wheel'}, {'frequency': 'c', 'id': 1175, 'synset': 'walking_stick.n.01', 'synonyms': ['walking_stick'], 'def': 'a stick carried in the hand for support in walking', 'name': 'walking_stick'}, {'frequency': 'c', 'id': 1176, 'synset': 'wall_clock.n.01', 'synonyms': ['wall_clock'], 'def': 'a clock mounted on a wall', 'name': 'wall_clock'}, {'frequency': 'f', 'id': 1177, 'synset': 'wall_socket.n.01', 'synonyms': ['wall_socket', 'wall_plug', 'electric_outlet', 'electrical_outlet', 'outlet', 'electric_receptacle'], 'def': 'receptacle providing a place in a wiring system where current can be taken to run electrical devices', 'name': 'wall_socket'}, {'frequency': 'c', 'id': 1178, 'synset': 'wallet.n.01', 'synonyms': ['wallet', 'billfold'], 'def': 'a pocket-size case for holding papers and paper money', 'name': 'wallet'}, {'frequency': 'r', 'id': 1179, 'synset': 'walrus.n.01', 'synonyms': ['walrus'], 'def': 'either of two large northern marine mammals having ivory tusks and tough hide over thick blubber', 'name': 'walrus'}, {'frequency': 'r', 'id': 1180, 'synset': 'wardrobe.n.01', 'synonyms': ['wardrobe'], 'def': 'a tall piece of furniture that provides 
storage space for clothes; has a door and rails or hooks for hanging clothes', 'name': 'wardrobe'}, {'frequency': 'r', 'id': 1181, 'synset': 'wasabi.n.02', 'synonyms': ['wasabi'], 'def': 'the thick green root of the wasabi plant that the Japanese use in cooking and that tastes like strong horseradish', 'name': 'wasabi'}, {'frequency': 'c', 'id': 1182, 'synset': 'washer.n.03', 'synonyms': ['automatic_washer', 'washing_machine'], 'def': 'a home appliance for washing clothes and linens automatically', 'name': 'automatic_washer'}, {'frequency': 'f', 'id': 1183, 'synset': 'watch.n.01', 'synonyms': ['watch', 'wristwatch'], 'def': 'a small, portable timepiece', 'name': 'watch'}, {'frequency': 'f', 'id': 1184, 'synset': 'water_bottle.n.01', 'synonyms': ['water_bottle'], 'def': 'a bottle for holding water', 'name': 'water_bottle'}, {'frequency': 'c', 'id': 1185, 'synset': 'water_cooler.n.01', 'synonyms': ['water_cooler'], 'def': 'a device for cooling and dispensing drinking water', 'name': 'water_cooler'}, {'frequency': 'c', 'id': 1186, 'synset': 'water_faucet.n.01', 'synonyms': ['water_faucet', 'water_tap', 'tap_(water_faucet)'], 'def': 'a faucet for drawing water from a pipe or cask', 'name': 'water_faucet'}, {'frequency': 'r', 'id': 1187, 'synset': 'water_filter.n.01', 'synonyms': ['water_filter'], 'def': 'a filter to remove impurities from the water supply', 'name': 'water_filter'}, {'frequency': 'r', 'id': 1188, 'synset': 'water_heater.n.01', 'synonyms': ['water_heater', 'hot-water_heater'], 'def': 'a heater and storage tank to supply heated water', 'name': 'water_heater'}, {'frequency': 'r', 'id': 1189, 'synset': 'water_jug.n.01', 'synonyms': ['water_jug'], 'def': 'a jug that holds water', 'name': 'water_jug'}, {'frequency': 'r', 'id': 1190, 'synset': 'water_pistol.n.01', 'synonyms': ['water_gun', 'squirt_gun'], 'def': 'plaything consisting of a toy pistol that squirts water', 'name': 'water_gun'}, {'frequency': 'c', 'id': 1191, 'synset': 'water_scooter.n.01', 'synonyms': ['water_scooter', 'sea_scooter', 'jet_ski'], 'def': 'a motorboat resembling a motor scooter (NOT A SURFBOARD OR WATER SKI)', 'name': 'water_scooter'}, {'frequency': 'c', 'id': 1192, 'synset': 'water_ski.n.01', 'synonyms': ['water_ski'], 'def': 'broad ski for skimming over water towed by a speedboat (DO NOT MARK WATER)', 'name': 'water_ski'}, {'frequency': 'c', 'id': 1193, 'synset': 'water_tower.n.01', 'synonyms': ['water_tower'], 'def': 'a large reservoir for water', 'name': 'water_tower'}, {'frequency': 'c', 'id': 1194, 'synset': 'watering_can.n.01', 'synonyms': ['watering_can'], 'def': 'a container with a handle and a spout with a perforated nozzle; used to sprinkle water over plants', 'name': 'watering_can'}, {'frequency': 'c', 'id': 1195, 'synset': 'watermelon.n.02', 'synonyms': ['watermelon'], 'def': 'large oblong or roundish melon with a hard green rind and sweet watery red or occasionally yellowish pulp', 'name': 'watermelon'}, {'frequency': 'f', 'id': 1196, 'synset': 'weathervane.n.01', 'synonyms': ['weathervane', 'vane_(weathervane)', 'wind_vane'], 'def': 'mechanical device attached to an elevated structure; rotates freely to show the direction of the wind', 'name': 'weathervane'}, {'frequency': 'c', 'id': 1197, 'synset': 'webcam.n.01', 'synonyms': ['webcam'], 'def': 'a digital camera designed to take digital photographs and transmit them over the internet', 'name': 'webcam'}, {'frequency': 'c', 'id': 1198, 'synset': 'wedding_cake.n.01', 'synonyms': ['wedding_cake', 'bridecake'], 'def': 'a rich cake with two or more 
tiers and covered with frosting and decorations; served at a wedding reception', 'name': 'wedding_cake'}, {'frequency': 'c', 'id': 1199, 'synset': 'wedding_ring.n.01', 'synonyms': ['wedding_ring', 'wedding_band'], 'def': 'a ring given to the bride and/or groom at the wedding', 'name': 'wedding_ring'}, {'frequency': 'f', 'id': 1200, 'synset': 'wet_suit.n.01', 'synonyms': ['wet_suit'], 'def': 'a close-fitting garment made of a permeable material; worn in cold water to retain body heat', 'name': 'wet_suit'}, {'frequency': 'f', 'id': 1201, 'synset': 'wheel.n.01', 'synonyms': ['wheel'], 'def': 'a circular frame with spokes (or a solid disc) that can rotate on a shaft or axle', 'name': 'wheel'}, {'frequency': 'c', 'id': 1202, 'synset': 'wheelchair.n.01', 'synonyms': ['wheelchair'], 'def': 'a movable chair mounted on large wheels', 'name': 'wheelchair'}, {'frequency': 'c', 'id': 1203, 'synset': 'whipped_cream.n.01', 'synonyms': ['whipped_cream'], 'def': 'cream that has been beaten until light and fluffy', 'name': 'whipped_cream'}, {'frequency': 'r', 'id': 1204, 'synset': 'whiskey.n.01', 'synonyms': ['whiskey'], 'def': 'a liquor made from fermented mash of grain', 'name': 'whiskey'}, {'frequency': 'r', 'id': 1205, 'synset': 'whistle.n.03', 'synonyms': ['whistle'], 'def': 'a small wind instrument that produces a whistling sound by blowing into it', 'name': 'whistle'}, {'frequency': 'r', 'id': 1206, 'synset': 'wick.n.02', 'synonyms': ['wick'], 'def': 'a loosely woven cord in a candle or oil lamp that is lit on fire', 'name': 'wick'}, {'frequency': 'c', 'id': 1207, 'synset': 'wig.n.01', 'synonyms': ['wig'], 'def': 'hairpiece covering the head and made of real or synthetic hair', 'name': 'wig'}, {'frequency': 'c', 'id': 1208, 'synset': 'wind_chime.n.01', 'synonyms': ['wind_chime'], 'def': 'a decorative arrangement of pieces of metal or glass or pottery that hang together loosely so the wind can cause them to tinkle', 'name': 'wind_chime'}, {'frequency': 'c', 'id': 1209, 'synset': 'windmill.n.01', 'synonyms': ['windmill'], 'def': 'a mill that is powered by the wind', 'name': 'windmill'}, {'frequency': 'c', 'id': 1210, 'synset': 'window_box.n.01', 'synonyms': ['window_box_(for_plants)'], 'def': 'a container for growing plants on a windowsill', 'name': 'window_box_(for_plants)'}, {'frequency': 'f', 'id': 1211, 'synset': 'windshield_wiper.n.01', 'synonyms': ['windshield_wiper', 'windscreen_wiper', 'wiper_(for_windshield/screen)'], 'def': 'a mechanical device that cleans the windshield', 'name': 'windshield_wiper'}, {'frequency': 'c', 'id': 1212, 'synset': 'windsock.n.01', 'synonyms': ['windsock', 'air_sock', 'air-sleeve', 'wind_sleeve', 'wind_cone'], 'def': 'a truncated cloth cone mounted on a mast/pole; shows wind direction', 'name': 'windsock'}, {'frequency': 'f', 'id': 1213, 'synset': 'wine_bottle.n.01', 'synonyms': ['wine_bottle'], 'def': 'a bottle for holding wine', 'name': 'wine_bottle'}, {'frequency': 'r', 'id': 1214, 'synset': 'wine_bucket.n.01', 'synonyms': ['wine_bucket', 'wine_cooler'], 'def': 'a bucket of ice used to chill a bottle of wine', 'name': 'wine_bucket'}, {'frequency': 'f', 'id': 1215, 'synset': 'wineglass.n.01', 'synonyms': ['wineglass'], 'def': 'a glass that has a stem and in which wine is served', 'name': 'wineglass'}, {'frequency': 'r', 'id': 1216, 'synset': 'wing_chair.n.01', 'synonyms': ['wing_chair'], 'def': 'easy chair having wings on each side of a high back', 'name': 'wing_chair'}, {'frequency': 'c', 'id': 1217, 'synset': 'winker.n.02', 'synonyms': ['blinder_(for_horses)'], 
'def': 'blinds that prevent a horse from seeing something on either side', 'name': 'blinder_(for_horses)'}, {'frequency': 'c', 'id': 1218, 'synset': 'wok.n.01', 'synonyms': ['wok'], 'def': 'pan with a convex bottom; used for frying in Chinese cooking', 'name': 'wok'}, {'frequency': 'r', 'id': 1219, 'synset': 'wolf.n.01', 'synonyms': ['wolf'], 'def': 'a wild carnivorous mammal of the dog family, living and hunting in packs', 'name': 'wolf'}, {'frequency': 'c', 'id': 1220, 'synset': 'wooden_spoon.n.02', 'synonyms': ['wooden_spoon'], 'def': 'a spoon made of wood', 'name': 'wooden_spoon'}, {'frequency': 'c', 'id': 1221, 'synset': 'wreath.n.01', 'synonyms': ['wreath'], 'def': 'an arrangement of flowers, leaves, or stems fastened in a ring', 'name': 'wreath'}, {'frequency': 'c', 'id': 1222, 'synset': 'wrench.n.03', 'synonyms': ['wrench', 'spanner'], 'def': 'a hand tool that is used to hold or twist a nut or bolt', 'name': 'wrench'}, {'frequency': 'c', 'id': 1223, 'synset': 'wristband.n.01', 'synonyms': ['wristband'], 'def': 'band consisting of a part of a sleeve that covers the wrist', 'name': 'wristband'}, {'frequency': 'f', 'id': 1224, 'synset': 'wristlet.n.01', 'synonyms': ['wristlet', 'wrist_band'], 'def': 'a band or bracelet worn around the wrist', 'name': 'wristlet'}, {'frequency': 'r', 'id': 1225, 'synset': 'yacht.n.01', 'synonyms': ['yacht'], 'def': 'an expensive vessel propelled by sail or power and used for cruising or racing', 'name': 'yacht'}, {'frequency': 'r', 'id': 1226, 'synset': 'yak.n.02', 'synonyms': ['yak'], 'def': 'large long-haired wild ox of Tibet often domesticated', 'name': 'yak'}, {'frequency': 'c', 'id': 1227, 'synset': 'yogurt.n.01', 'synonyms': ['yogurt', 'yoghurt', 'yoghourt'], 'def': 'a custard-like food made from curdled milk', 'name': 'yogurt'}, {'frequency': 'r', 'id': 1228, 'synset': 'yoke.n.07', 'synonyms': ['yoke_(animal_equipment)'], 'def': 'gear joining two animals at the neck; NOT egg yolk', 'name': 'yoke_(animal_equipment)'}, {'frequency': 'f', 'id': 1229, 'synset': 'zebra.n.01', 'synonyms': ['zebra'], 'def': 'any of several fleet black-and-white striped African equines', 'name': 'zebra'}, {'frequency': 'c', 'id': 1230, 'synset': 'zucchini.n.02', 'synonyms': ['zucchini', 'courgette'], 'def': 'small cucumber-shaped vegetable marrow; typically dark green', 'name': 'zucchini'}] # noqa +# fmt: on diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/pascal_voc.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/pascal_voc.py new file mode 100644 index 0000000000000000000000000000000000000000..5872d96575b428e90b29a7759a2f7b32dcc15d25 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/pascal_voc.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import numpy as np +import os +import xml.etree.ElementTree as ET +from fvcore.common.file_io import PathManager + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.structures import BoxMode + +__all__ = ["register_pascal_voc"] + + +# fmt: off +CLASS_NAMES = [ + "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", + "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person", + "pottedplant", "sheep", "sofa", "train", "tvmonitor", +] +# fmt: on + + +def load_voc_instances(dirname: str, split: str): + """ + Load Pascal VOC detection annotations to Detectron2 format. + + Args: + dirname: Contain "Annotations", "ImageSets", "JPEGImages" + split (str): one of "train", "test", "val", "trainval" + """ + with PathManager.open(os.path.join(dirname, "ImageSets", "Main", split + ".txt")) as f: + fileids = np.loadtxt(f, dtype=np.str) + + # Needs to read many small annotation files. Makes sense at local + annotation_dirname = PathManager.get_local_path(os.path.join(dirname, "Annotations/")) + dicts = [] + for fileid in fileids: + anno_file = os.path.join(annotation_dirname, fileid + ".xml") + jpeg_file = os.path.join(dirname, "JPEGImages", fileid + ".jpg") + + with PathManager.open(anno_file) as f: + tree = ET.parse(f) + + r = { + "file_name": jpeg_file, + "image_id": fileid, + "height": int(tree.findall("./size/height")[0].text), + "width": int(tree.findall("./size/width")[0].text), + } + instances = [] + + for obj in tree.findall("object"): + cls = obj.find("name").text + # We include "difficult" samples in training. + # Based on limited experiments, they don't hurt accuracy. + # difficult = int(obj.find("difficult").text) + # if difficult == 1: + # continue + bbox = obj.find("bndbox") + bbox = [float(bbox.find(x).text) for x in ["xmin", "ymin", "xmax", "ymax"]] + # Original annotations are integers in the range [1, W or H] + # Assuming they mean 1-based pixel indices (inclusive), + # a box with annotation (xmin=1, xmax=W) covers the whole image. + # In coordinate space this is represented by (xmin=0, xmax=W) + bbox[0] -= 1.0 + bbox[1] -= 1.0 + instances.append( + {"category_id": CLASS_NAMES.index(cls), "bbox": bbox, "bbox_mode": BoxMode.XYXY_ABS} + ) + r["annotations"] = instances + dicts.append(r) + return dicts + + +def register_pascal_voc(name, dirname, split, year): + DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split)) + MetadataCatalog.get(name).set( + thing_classes=CLASS_NAMES, dirname=dirname, year=year, split=split + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/register_coco.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/register_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a0a4db66f23ffbf42f551bf56e18c7acbfe3f71e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/datasets/register_coco.py @@ -0,0 +1,129 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import os + +from detectron2.data import DatasetCatalog, MetadataCatalog + +from .coco import load_coco_json, load_sem_seg + +""" +This file contains functions to register a COCO-format dataset to the DatasetCatalog. 
+""" + +__all__ = ["register_coco_instances", "register_coco_panoptic_separated"] + + +def register_coco_instances(name, metadata, json_file, image_root): + """ + Register a dataset in COCO's json annotation format for + instance detection, instance segmentation and keypoint detection. + (i.e., Type 1 and 2 in http://cocodataset.org/#format-data. + `instances*.json` and `person_keypoints*.json` in the dataset). + + This is an example of how to register a new dataset. + You can do something similar to this function, to register new data. + + Args: + name (str): the name that identifies a dataset, e.g. "coco_2014_train". + metadata (dict): extra metadata associated with this dataset. You can + leave it as an empty dict. + json_file (str): path to the json instance annotation file. + image_root (str or path-like): directory which contains all the images. + """ + assert isinstance(name, str), name + assert isinstance(json_file, (str, os.PathLike)), json_file + assert isinstance(image_root, (str, os.PathLike)), image_root + # 1. register a function which returns dicts + DatasetCatalog.register(name, lambda: load_coco_json(json_file, image_root, name)) + + # 2. Optionally, add metadata about this dataset, + # since they might be useful in evaluation, visualization or logging + MetadataCatalog.get(name).set( + json_file=json_file, image_root=image_root, evaluator_type="coco", **metadata + ) + + +def register_coco_panoptic_separated( + name, metadata, image_root, panoptic_root, panoptic_json, sem_seg_root, instances_json +): + """ + Register a COCO panoptic segmentation dataset named `name`. + The annotations in this registered dataset will contain both instance annotations and + semantic annotations, each with its own contiguous ids. Hence it's called "separated". + + It follows the setting used by the PanopticFPN paper: + + 1. The instance annotations directly come from polygons in the COCO + instances annotation task, rather than from the masks in the COCO panoptic annotations. + + The two format have small differences: + Polygons in the instance annotations may have overlaps. + The mask annotations are produced by labeling the overlapped polygons + with depth ordering. + + 2. The semantic annotations are converted from panoptic annotations, where + all "things" are assigned a semantic id of 0. + All semantic categories will therefore have ids in contiguous + range [1, #stuff_categories]. + + This function will also register a pure semantic segmentation dataset + named ``name + '_stuffonly'``. + + Args: + name (str): the name that identifies a dataset, + e.g. "coco_2017_train_panoptic" + metadata (dict): extra metadata associated with this dataset. + image_root (str): directory which contains all the images + panoptic_root (str): directory which contains panoptic annotation images + panoptic_json (str): path to the json panoptic annotation file + sem_seg_root (str): directory which contains all the ground truth segmentation annotations. 
+ instances_json (str): path to the json instance annotation file + """ + panoptic_name = name + "_separated" + DatasetCatalog.register( + panoptic_name, + lambda: merge_to_panoptic( + load_coco_json(instances_json, image_root, panoptic_name), + load_sem_seg(sem_seg_root, image_root), + ), + ) + MetadataCatalog.get(panoptic_name).set( + panoptic_root=panoptic_root, + image_root=image_root, + panoptic_json=panoptic_json, + sem_seg_root=sem_seg_root, + json_file=instances_json, # TODO rename + evaluator_type="coco_panoptic_seg", + **metadata + ) + + semantic_name = name + "_stuffonly" + DatasetCatalog.register(semantic_name, lambda: load_sem_seg(sem_seg_root, image_root)) + MetadataCatalog.get(semantic_name).set( + sem_seg_root=sem_seg_root, image_root=image_root, evaluator_type="sem_seg", **metadata + ) + + +def merge_to_panoptic(detection_dicts, sem_seg_dicts): + """ + Create dataset dicts for panoptic segmentation, by + merging two dicts using "file_name" field to match their entries. + + Args: + detection_dicts (list[dict]): lists of dicts for object detection or instance segmentation. + sem_seg_dicts (list[dict]): lists of dicts for semantic segmentation. + + Returns: + list[dict] (one per input image): Each dict contains all (key, value) pairs from dicts in + both detection_dicts and sem_seg_dicts that correspond to the same image. + The function assumes that the same key in different dicts has the same value. + """ + results = [] + sem_seg_file_to_entry = {x["file_name"]: x for x in sem_seg_dicts} + assert len(sem_seg_file_to_entry) > 0 + + for det_dict in detection_dicts: + dic = copy.copy(det_dict) + dic.update(sem_seg_file_to_entry[dic["file_name"]]) + results.append(dic) + return results diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/detection_utils.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/detection_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e19c7e2f2b4600b77923141ccd04693d4086562f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/detection_utils.py @@ -0,0 +1,516 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +Common data processing utilities that are used in a +typical object detection data pipeline. +""" +import logging +import numpy as np +import pycocotools.mask as mask_util +import torch +from fvcore.common.file_io import PathManager +from PIL import Image, ImageOps + +from detectron2.structures import ( + BitMasks, + Boxes, + BoxMode, + Instances, + Keypoints, + PolygonMasks, + RotatedBoxes, + polygons_to_bitmask, +) + +from . import transforms as T +from .catalog import MetadataCatalog + + +class SizeMismatchError(ValueError): + """ + When loaded image has difference width/height compared with annotation. + """ + + +# https://en.wikipedia.org/wiki/YUV#SDTV_with_BT.601 +_M_RGB2YUV = [[0.299, 0.587, 0.114], [-0.14713, -0.28886, 0.436], [0.615, -0.51499, -0.10001]] +_M_YUV2RGB = [[1.0, 0.0, 1.13983], [1.0, -0.39465, -0.58060], [1.0, 2.03211, 0.0]] + + +def convert_PIL_to_numpy(image, format): + """ + Convert PIL image to numpy array of target format. 
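+
+    A small usage sketch (the file name is illustrative):
+
+        from PIL import Image
+        arr = convert_PIL_to_numpy(Image.open("demo.jpg"), "BGR")
+        # arr is an HxWx3 uint8 array with channels in BGR order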
+ + Args: + image (PIL.Image): a PIL image + format (str): the format of output image + + Returns: + (np.ndarray): also see `read_image` + """ + if format is not None: + # PIL only supports RGB, so convert to RGB and flip channels over below + conversion_format = format + if format in ["BGR", "YUV-BT.601"]: + conversion_format = "RGB" + image = image.convert(conversion_format) + image = np.asarray(image) + # PIL squeezes out the channel dimension for "L", so make it HWC + if format == "L": + image = np.expand_dims(image, -1) + + # handle formats not supported by PIL + elif format == "BGR": + # flip channels if needed + image = image[:, :, ::-1] + elif format == "YUV-BT.601": + image = image / 255.0 + image = np.dot(image, np.array(_M_RGB2YUV).T) + + return image + + +def convert_image_to_rgb(image, format): + """ + Convert numpy image from given format to RGB. + + Args: + image (np.ndarray): a numpy image + format (str): the format of input image, also see `read_image` + + Returns: + (np.ndarray): HWC RGB image in 0-255 range, can be either float or uint8 + """ + if format == "BGR": + image = image[:, :, [2, 1, 0]] + elif format == "YUV-BT.601": + image = np.dot(image, np.array(_M_YUV2RGB).T) + image = image * 255.0 + else: + if format == "L": + image = image[:, :, 0] + image = image.astype(np.uint8) + image = np.asarray(Image.fromarray(image, mode=format).convert("RGB")) + return image + + +def read_image(file_name, format=None): + """ + Read an image into the given format. + Will apply rotation and flipping if the image has such exif information. + + Args: + file_name (str): image file path + format (str): one of the supported image modes in PIL, or "BGR" or "YUV-BT.601" + + Returns: + image (np.ndarray): an HWC image in the given format, which is 0-255, uint8 for + supported image modes in PIL or "BGR"; float (0-1 for Y) for YUV-BT.601. + """ + with PathManager.open(file_name, "rb") as f: + image = Image.open(f) + + # capture and ignore this bug: https://github.com/python-pillow/Pillow/issues/3973 + try: + image = ImageOps.exif_transpose(image) + except Exception: + pass + + return convert_PIL_to_numpy(image, format) + + +def check_image_size(dataset_dict, image): + """ + Raise an error if the image does not match the size specified in the dict. + """ + if "width" in dataset_dict or "height" in dataset_dict: + image_wh = (image.shape[1], image.shape[0]) + expected_wh = (dataset_dict["width"], dataset_dict["height"]) + if not image_wh == expected_wh: + raise SizeMismatchError( + "Mismatched (W,H){}, got {}, expect {}".format( + " for image " + dataset_dict["file_name"] + if "file_name" in dataset_dict + else "", + image_wh, + expected_wh, + ) + ) + + # To ensure bbox always remap to original image size + if "width" not in dataset_dict: + dataset_dict["width"] = image.shape[1] + if "height" not in dataset_dict: + dataset_dict["height"] = image.shape[0] + + +def transform_proposals(dataset_dict, image_shape, transforms, min_box_side_len, proposal_topk): + """ + Apply transformations to the proposals in dataset_dict, if any. + + Args: + dataset_dict (dict): a dict read from the dataset, possibly + contains fields "proposal_boxes", "proposal_objectness_logits", "proposal_bbox_mode" + image_shape (tuple): height, width + transforms (TransformList): + min_box_side_len (int): keep proposals with at least this size + proposal_topk (int): only keep top-K scoring proposals + + The input dict is modified in-place, with abovementioned keys removed. A new + key "proposals" will be added. 
Its value is an `Instances` + object which contains the transformed proposals in its field + "proposal_boxes" and "objectness_logits". + """ + if "proposal_boxes" in dataset_dict: + # Transform proposal boxes + boxes = transforms.apply_box( + BoxMode.convert( + dataset_dict.pop("proposal_boxes"), + dataset_dict.pop("proposal_bbox_mode"), + BoxMode.XYXY_ABS, + ) + ) + boxes = Boxes(boxes) + objectness_logits = torch.as_tensor( + dataset_dict.pop("proposal_objectness_logits").astype("float32") + ) + + boxes.clip(image_shape) + keep = boxes.nonempty(threshold=min_box_side_len) + boxes = boxes[keep] + objectness_logits = objectness_logits[keep] + + proposals = Instances(image_shape) + proposals.proposal_boxes = boxes[:proposal_topk] + proposals.objectness_logits = objectness_logits[:proposal_topk] + dataset_dict["proposals"] = proposals + + +def transform_instance_annotations( + annotation, transforms, image_size, *, keypoint_hflip_indices=None +): + """ + Apply transforms to box, segmentation and keypoints annotations of a single instance. + + It will use `transforms.apply_box` for the box, and + `transforms.apply_coords` for segmentation polygons & keypoints. + If you need anything more specially designed for each data structure, + you'll need to implement your own version of this function or the transforms. + + Args: + annotation (dict): dict of instance annotations for a single instance. + It will be modified in-place. + transforms (TransformList): + image_size (tuple): the height, width of the transformed image + keypoint_hflip_indices (ndarray[int]): see `create_keypoint_hflip_indices`. + + Returns: + dict: + the same input dict with fields "bbox", "segmentation", "keypoints" + transformed according to `transforms`. + The "bbox_mode" field will be set to XYXY_ABS. + """ + bbox = BoxMode.convert(annotation["bbox"], annotation["bbox_mode"], BoxMode.XYXY_ABS) + # Note that bbox is 1d (per-instance bounding box) + annotation["bbox"] = transforms.apply_box([bbox])[0] + annotation["bbox_mode"] = BoxMode.XYXY_ABS + + if "segmentation" in annotation: + # each instance contains 1 or more polygons + segm = annotation["segmentation"] + if isinstance(segm, list): + # polygons + polygons = [np.asarray(p).reshape(-1, 2) for p in segm] + annotation["segmentation"] = [ + p.reshape(-1) for p in transforms.apply_polygons(polygons) + ] + elif isinstance(segm, dict): + # RLE + mask = mask_util.decode(segm) + mask = transforms.apply_segmentation(mask) + assert tuple(mask.shape[:2]) == image_size + annotation["segmentation"] = mask + else: + raise ValueError( + "Cannot transform segmentation of type '{}'!" + "Supported types are: polygons as list[list[float] or ndarray]," + " COCO-style RLE as a dict.".format(type(segm)) + ) + + if "keypoints" in annotation: + keypoints = transform_keypoint_annotations( + annotation["keypoints"], transforms, image_size, keypoint_hflip_indices + ) + annotation["keypoints"] = keypoints + + return annotation + + +def transform_keypoint_annotations(keypoints, transforms, image_size, keypoint_hflip_indices=None): + """ + Transform keypoint annotations of an image. + + Args: + keypoints (list[float]): Nx3 float in Detectron2 Dataset format. + transforms (TransformList): + image_size (tuple): the height, width of the transformed image + keypoint_hflip_indices (ndarray[int]): see `create_keypoint_hflip_indices`. 
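+
+    A small sketch with made-up values (``T.HFlipTransform`` is used elsewhere in
+    this file; ``T.TransformList`` is assumed to be re-exported from fvcore, as in
+    upstream detectron2):
+
+        kpts = [10.0, 20.0, 2.0, 30.0, 20.0, 2.0]     # two keypoints as (x, y, v)
+        tfms = T.TransformList([T.HFlipTransform(width=640)])
+        hflip_inds = np.asarray([1, 0])               # the two keypoints form a left/right pair
+        out = transform_keypoint_annotations(kpts, tfms, (480, 640), hflip_inds)
+        # out is an ndarray of shape (2, 3) with x mirrored and the pair swapped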
+ """ + # (N*3,) -> (N, 3) + keypoints = np.asarray(keypoints, dtype="float64").reshape(-1, 3) + keypoints[:, :2] = transforms.apply_coords(keypoints[:, :2]) + + # This assumes that HorizFlipTransform is the only one that does flip + do_hflip = sum(isinstance(t, T.HFlipTransform) for t in transforms.transforms) % 2 == 1 + + # Alternative way: check if probe points was horizontally flipped. + # probe = np.asarray([[0.0, 0.0], [image_width, 0.0]]) + # probe_aug = transforms.apply_coords(probe.copy()) + # do_hflip = np.sign(probe[1][0] - probe[0][0]) != np.sign(probe_aug[1][0] - probe_aug[0][0]) # noqa + + # If flipped, swap each keypoint with its opposite-handed equivalent + if do_hflip: + assert keypoint_hflip_indices is not None + keypoints = keypoints[keypoint_hflip_indices, :] + + # Maintain COCO convention that if visibility == 0, then x, y = 0 + # TODO may need to reset visibility for cropped keypoints, + # but it does not matter for our existing algorithms + keypoints[keypoints[:, 2] == 0] = 0 + return keypoints + + +def annotations_to_instances(annos, image_size, mask_format="polygon"): + """ + Create an :class:`Instances` object used by the models, + from instance annotations in the dataset dict. + + Args: + annos (list[dict]): a list of instance annotations in one image, each + element for one instance. + image_size (tuple): height, width + + Returns: + Instances: + It will contain fields "gt_boxes", "gt_classes", + "gt_masks", "gt_keypoints", if they can be obtained from `annos`. + This is the format that builtin models expect. + """ + boxes = [BoxMode.convert(obj["bbox"], obj["bbox_mode"], BoxMode.XYXY_ABS) for obj in annos] + target = Instances(image_size) + boxes = target.gt_boxes = Boxes(boxes) + boxes.clip(image_size) + + classes = [obj["category_id"] for obj in annos] + classes = torch.tensor(classes, dtype=torch.int64) + target.gt_classes = classes + + if len(annos) and "segmentation" in annos[0]: + segms = [obj["segmentation"] for obj in annos] + if mask_format == "polygon": + masks = PolygonMasks(segms) + else: + assert mask_format == "bitmask", mask_format + masks = [] + for segm in segms: + if isinstance(segm, list): + # polygon + masks.append(polygons_to_bitmask(segm, *image_size)) + elif isinstance(segm, dict): + # COCO RLE + masks.append(mask_util.decode(segm)) + elif isinstance(segm, np.ndarray): + assert segm.ndim == 2, "Expect segmentation of 2 dimensions, got {}.".format( + segm.ndim + ) + # mask array + masks.append(segm) + else: + raise ValueError( + "Cannot convert segmentation of type '{}' to BitMasks!" + "Supported types are: polygons as list[list[float] or ndarray]," + " COCO-style RLE as a dict, or a full-image segmentation mask " + "as a 2D ndarray.".format(type(segm)) + ) + # torch.from_numpy does not support array with negative stride. + masks = BitMasks( + torch.stack([torch.from_numpy(np.ascontiguousarray(x)) for x in masks]) + ) + target.gt_masks = masks + + if len(annos) and "keypoints" in annos[0]: + kpts = [obj.get("keypoints", []) for obj in annos] + target.gt_keypoints = Keypoints(kpts) + + return target + + +def annotations_to_instances_rotated(annos, image_size): + """ + Create an :class:`Instances` object used by the models, + from instance annotations in the dataset dict. + Compared to `annotations_to_instances`, this function is for rotated boxes only + + Args: + annos (list[dict]): a list of instance annotations in one image, each + element for one instance. 
+ image_size (tuple): height, width + + Returns: + Instances: + Containing fields "gt_boxes", "gt_classes", + if they can be obtained from `annos`. + This is the format that builtin models expect. + """ + boxes = [obj["bbox"] for obj in annos] + target = Instances(image_size) + boxes = target.gt_boxes = RotatedBoxes(boxes) + boxes.clip(image_size) + + classes = [obj["category_id"] for obj in annos] + classes = torch.tensor(classes, dtype=torch.int64) + target.gt_classes = classes + + return target + + +def filter_empty_instances(instances, by_box=True, by_mask=True, box_threshold=1e-5): + """ + Filter out empty instances in an `Instances` object. + + Args: + instances (Instances): + by_box (bool): whether to filter out instances with empty boxes + by_mask (bool): whether to filter out instances with empty masks + box_threshold (float): minimum width and height to be considered non-empty + + Returns: + Instances: the filtered instances. + """ + assert by_box or by_mask + r = [] + if by_box: + r.append(instances.gt_boxes.nonempty(threshold=box_threshold)) + if instances.has("gt_masks") and by_mask: + r.append(instances.gt_masks.nonempty()) + + # TODO: can also filter visible keypoints + + if not r: + return instances + m = r[0] + for x in r[1:]: + m = m & x + return instances[m] + + +def create_keypoint_hflip_indices(dataset_names): + """ + Args: + dataset_names (list[str]): list of dataset names + Returns: + ndarray[int]: a vector of size=#keypoints, storing the + horizontally-flipped keypoint indices. + """ + + check_metadata_consistency("keypoint_names", dataset_names) + check_metadata_consistency("keypoint_flip_map", dataset_names) + + meta = MetadataCatalog.get(dataset_names[0]) + names = meta.keypoint_names + # TODO flip -> hflip + flip_map = dict(meta.keypoint_flip_map) + flip_map.update({v: k for k, v in flip_map.items()}) + flipped_names = [i if i not in flip_map else flip_map[i] for i in names] + flip_indices = [names.index(i) for i in flipped_names] + return np.asarray(flip_indices) + + +def gen_crop_transform_with_instance(crop_size, image_size, instance): + """ + Generate a CropTransform so that the cropping region contains + the center of the given instance. + + Args: + crop_size (tuple): h, w in pixels + image_size (tuple): h, w + instance (dict): an annotation dict of one instance, in Detectron2's + dataset format. + """ + crop_size = np.asarray(crop_size, dtype=np.int32) + bbox = BoxMode.convert(instance["bbox"], instance["bbox_mode"], BoxMode.XYXY_ABS) + center_yx = (bbox[1] + bbox[3]) * 0.5, (bbox[0] + bbox[2]) * 0.5 + assert ( + image_size[0] >= center_yx[0] and image_size[1] >= center_yx[1] + ), "The annotation bounding box is outside of the image!" + assert ( + image_size[0] >= crop_size[0] and image_size[1] >= crop_size[1] + ), "Crop size is larger than image size!" + + min_yx = np.maximum(np.floor(center_yx).astype(np.int32) - crop_size, 0) + max_yx = np.maximum(np.asarray(image_size, dtype=np.int32) - crop_size, 0) + max_yx = np.minimum(max_yx, np.ceil(center_yx).astype(np.int32)) + + y0 = np.random.randint(min_yx[0], max_yx[0] + 1) + x0 = np.random.randint(min_yx[1], max_yx[1] + 1) + return T.CropTransform(x0, y0, crop_size[1], crop_size[0]) + + +def check_metadata_consistency(key, dataset_names): + """ + Check that the data have consistent metadata. 
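+
+    A usage sketch (the dataset names are illustrative); this raises ValueError if
+    the registered datasets disagree on the given key:
+
+        check_metadata_consistency("thing_classes", ["my_train", "my_val"])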
+ + Args: + key (str): a metadata key + dataset_names (list[str]): a list of dataset names + + Raises: + AttributeError: if the key does not exist in the metadata + ValueError: if the given data do not have the same metadata values defined by key + """ + if len(dataset_names) == 0: + return + logger = logging.getLogger(__name__) + entries_per_dataset = [getattr(MetadataCatalog.get(d), key) for d in dataset_names] + for idx, entry in enumerate(entries_per_dataset): + if entry != entries_per_dataset[0]: + logger.error( + "Metadata '{}' for dataset '{}' is '{}'".format(key, dataset_names[idx], str(entry)) + ) + logger.error( + "Metadata '{}' for dataset '{}' is '{}'".format( + key, dataset_names[0], str(entries_per_dataset[0]) + ) + ) + raise ValueError("Datasets have different metadata '{}'!".format(key)) + + +def build_transform_gen(cfg, is_train): + """ + Create a list of :class:`TransformGen` from config. + Now it includes resizing and flipping. + + Returns: + list[TransformGen] + """ + if is_train: + min_size = cfg.INPUT.MIN_SIZE_TRAIN + max_size = cfg.INPUT.MAX_SIZE_TRAIN + sample_style = cfg.INPUT.MIN_SIZE_TRAIN_SAMPLING + else: + min_size = cfg.INPUT.MIN_SIZE_TEST + max_size = cfg.INPUT.MAX_SIZE_TEST + sample_style = "choice" + if sample_style == "range": + assert len(min_size) == 2, "more than 2 ({}) min_size(s) are provided for ranges".format( + len(min_size) + ) + + logger = logging.getLogger(__name__) + tfm_gens = [] + tfm_gens.append(T.ResizeShortestEdge(min_size, max_size, sample_style)) + if is_train: + tfm_gens.append(T.RandomFlip()) + logger.info("TransformGens used in training: " + str(tfm_gens)) + return tfm_gens diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9cfa8a65259a850b8259016d482a0eac1bbafb38 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .distributed_sampler import InferenceSampler, RepeatFactorTrainingSampler, TrainingSampler +from .grouped_batch_sampler import GroupedBatchSampler + +__all__ = [ + "GroupedBatchSampler", + "TrainingSampler", + "InferenceSampler", + "RepeatFactorTrainingSampler", +] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/distributed_sampler.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/distributed_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..4ac57bbd10519be99114155d717802deac53e8fb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/distributed_sampler.py @@ -0,0 +1,199 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import itertools +import math +from collections import defaultdict +from typing import Optional +import torch +from torch.utils.data.sampler import Sampler + +from detectron2.utils import comm + + +class TrainingSampler(Sampler): + """ + In training, we only care about the "infinite stream" of training data. + So this sampler produces an infinite stream of indices and + all workers cooperate to correctly shuffle the indices and sample different indices. 
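+
+    For example (illustrative, single-process case, so rank=0 and world_size=1):
+
+        sampler = TrainingSampler(size=4, shuffle=False)
+        # iterating yields the infinite stream 0, 1, 2, 3, 0, 1, 2, 3, ...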
+ + The samplers in each worker effectively produces `indices[worker_id::num_workers]` + where `indices` is an infinite stream of indices consisting of + `shuffle(range(size)) + shuffle(range(size)) + ...` (if shuffle is True) + or `range(size) + range(size) + ...` (if shuffle is False) + """ + + def __init__(self, size: int, shuffle: bool = True, seed: Optional[int] = None): + """ + Args: + size (int): the total number of data of the underlying dataset to sample from + shuffle (bool): whether to shuffle the indices or not + seed (int): the initial seed of the shuffle. Must be the same + across all workers. If None, will use a random seed shared + among workers (require synchronization among all workers). + """ + self._size = size + assert size > 0 + self._shuffle = shuffle + if seed is None: + seed = comm.shared_random_seed() + self._seed = int(seed) + + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + + def __iter__(self): + start = self._rank + yield from itertools.islice(self._infinite_indices(), start, None, self._world_size) + + def _infinite_indices(self): + g = torch.Generator() + g.manual_seed(self._seed) + while True: + if self._shuffle: + yield from torch.randperm(self._size, generator=g) + else: + yield from torch.arange(self._size) + + +class RepeatFactorTrainingSampler(Sampler): + """ + Similar to TrainingSampler, but suitable for training on class imbalanced data + like LVIS. In each epoch, an image may appear multiple times based on its "repeat + factor". The repeat factor for an image is a function of the frequency the rarest + category labeled in that image. The "frequency of category c" in [0, 1] is defined + as the fraction of images in the training set (without repeats) in which category c + appears. + + See :paper:`lvis` (>= v2) Appendix B.2. + """ + + def __init__(self, dataset_dicts, repeat_thresh, shuffle=True, seed=None): + """ + Args: + dataset_dicts (list[dict]): annotations in Detectron2 dataset format. + repeat_thresh (float): frequency threshold below which data is repeated. + shuffle (bool): whether to shuffle the indices or not + seed (int): the initial seed of the shuffle. Must be the same + across all workers. If None, will use a random seed shared + among workers (require synchronization among all workers). + """ + self._shuffle = shuffle + if seed is None: + seed = comm.shared_random_seed() + self._seed = int(seed) + + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + + # Get fractional repeat factors and split into whole number (_int_part) + # and fractional (_frac_part) parts. + rep_factors = self._get_repeat_factors(dataset_dicts, repeat_thresh) + self._int_part = torch.trunc(rep_factors) + self._frac_part = rep_factors - self._int_part + + def _get_repeat_factors(self, dataset_dicts, repeat_thresh): + """ + Compute (fractional) per-image repeat factors. + + Args: + See __init__. + + Returns: + torch.Tensor: the i-th element is the repeat factor for the dataset image + at index i. + """ + # 1. For each category c, compute the fraction of images that contain it: f(c) + category_freq = defaultdict(int) + for dataset_dict in dataset_dicts: # For each image (without repeats) + cat_ids = {ann["category_id"] for ann in dataset_dict["annotations"]} + for cat_id in cat_ids: + category_freq[cat_id] += 1 + num_images = len(dataset_dicts) + for k, v in category_freq.items(): + category_freq[k] = v / num_images + + # 2. 
For each category c, compute the category-level repeat factor: + # r(c) = max(1, sqrt(t / f(c))) + category_rep = { + cat_id: max(1.0, math.sqrt(repeat_thresh / cat_freq)) + for cat_id, cat_freq in category_freq.items() + } + + # 3. For each image I, compute the image-level repeat factor: + # r(I) = max_{c in I} r(c) + rep_factors = [] + for dataset_dict in dataset_dicts: + cat_ids = {ann["category_id"] for ann in dataset_dict["annotations"]} + rep_factor = max({category_rep[cat_id] for cat_id in cat_ids}) + rep_factors.append(rep_factor) + + return torch.tensor(rep_factors, dtype=torch.float32) + + def _get_epoch_indices(self, generator): + """ + Create a list of dataset indices (with repeats) to use for one epoch. + + Args: + generator (torch.Generator): pseudo random number generator used for + stochastic rounding. + + Returns: + torch.Tensor: list of dataset indices to use in one epoch. Each index + is repeated based on its calculated repeat factor. + """ + # Since repeat factors are fractional, we use stochastic rounding so + # that the target repeat factor is achieved in expectation over the + # course of training + rands = torch.rand(len(self._frac_part), generator=generator) + rep_factors = self._int_part + (rands < self._frac_part).float() + # Construct a list of indices in which we repeat images as specified + indices = [] + for dataset_index, rep_factor in enumerate(rep_factors): + indices.extend([dataset_index] * int(rep_factor.item())) + return torch.tensor(indices, dtype=torch.int64) + + def __iter__(self): + start = self._rank + yield from itertools.islice(self._infinite_indices(), start, None, self._world_size) + + def _infinite_indices(self): + g = torch.Generator() + g.manual_seed(self._seed) + while True: + # Sample indices with repeats determined by stochastic rounding; each + # "epoch" may have a slightly different size due to the rounding. + indices = self._get_epoch_indices(g) + if self._shuffle: + randperm = torch.randperm(len(indices), generator=g) + yield from indices[randperm] + else: + yield from indices + + +class InferenceSampler(Sampler): + """ + Produce indices for inference. + Inference needs to run on the __exact__ set of samples, + therefore when the total number of samples is not divisible by the number of workers, + this sampler produces different number of samples on different workers. + """ + + def __init__(self, size: int): + """ + Args: + size (int): the total number of data of the underlying dataset to sample from + """ + self._size = size + assert size > 0 + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + + shard_size = (self._size - 1) // self._world_size + 1 + begin = shard_size * self._rank + end = min(shard_size * (self._rank + 1), self._size) + self._local_indices = range(begin, end) + + def __iter__(self): + yield from self._local_indices + + def __len__(self): + return len(self._local_indices) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/grouped_batch_sampler.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/grouped_batch_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..138e106136083383d9f8729f1da930804463b297 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/samplers/grouped_batch_sampler.py @@ -0,0 +1,47 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import numpy as np +from torch.utils.data.sampler import BatchSampler, Sampler + + +class GroupedBatchSampler(BatchSampler): + """ + Wraps another sampler to yield a mini-batch of indices. + It enforces that the batch only contain elements from the same group. + It also tries to provide mini-batches which follows an ordering which is + as close as possible to the ordering from the original sampler. + """ + + def __init__(self, sampler, group_ids, batch_size): + """ + Args: + sampler (Sampler): Base sampler. + group_ids (list[int]): If the sampler produces indices in range [0, N), + `group_ids` must be a list of `N` ints which contains the group id of each sample. + The group ids must be a set of integers in the range [0, num_groups). + batch_size (int): Size of mini-batch. + """ + if not isinstance(sampler, Sampler): + raise ValueError( + "sampler should be an instance of " + "torch.utils.data.Sampler, but got sampler={}".format(sampler) + ) + self.sampler = sampler + self.group_ids = np.asarray(group_ids) + assert self.group_ids.ndim == 1 + self.batch_size = batch_size + groups = np.unique(self.group_ids).tolist() + + # buffer the indices of each group until batch size is reached + self.buffer_per_group = {k: [] for k in groups} + + def __iter__(self): + for idx in self.sampler: + group_id = self.group_ids[idx] + group_buffer = self.buffer_per_group[group_id] + group_buffer.append(idx) + if len(group_buffer) == self.batch_size: + yield group_buffer[:] # yield a copy of the list + del group_buffer[:] + + def __len__(self): + raise NotImplementedError("len() of GroupedBatchSampler is not well-defined.") diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f7638bb58009ff3e00eb1373f2faa5dc2f30100d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .transform import * +from fvcore.transforms.transform import * +from .transform_gen import * + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/transform.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/transform.py new file mode 100644 index 0000000000000000000000000000000000000000..bd937538da4bed77ccb6a7ee45d7f15dc0281384 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/transform.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# File: transform.py + +import numpy as np +import torch +import torch.nn.functional as F +from fvcore.transforms.transform import HFlipTransform, NoOpTransform, Transform +from PIL import Image + +try: + import cv2 # noqa +except ImportError: + # OpenCV is an optional dependency at the moment + pass + +__all__ = ["ExtentTransform", "ResizeTransform", "RotationTransform"] + + +class ExtentTransform(Transform): + """ + Extracts a subregion from the source image and scales it to the output size. + + The fill color is used to map pixels from the source rect that fall outside + the source image. 
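+
+    For example (sizes are illustrative), cropping the left half of a 640x480 image
+    and scaling it to 224x224:
+
+        t = ExtentTransform(src_rect=(0, 0, 320, 480), output_size=(224, 224))
+        out = t.apply_image(img)   # img: HxWxC uint8 array; out has shape (224, 224, C)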
+ + See: https://pillow.readthedocs.io/en/latest/PIL.html#PIL.ImageTransform.ExtentTransform + """ + + def __init__(self, src_rect, output_size, interp=Image.LINEAR, fill=0): + """ + Args: + src_rect (x0, y0, x1, y1): src coordinates + output_size (h, w): dst image size + interp: PIL interpolation methods + fill: Fill color used when src_rect extends outside image + """ + super().__init__() + self._set_attributes(locals()) + + def apply_image(self, img, interp=None): + h, w = self.output_size + ret = Image.fromarray(img).transform( + size=(w, h), + method=Image.EXTENT, + data=self.src_rect, + resample=interp if interp else self.interp, + fill=self.fill, + ) + return np.asarray(ret) + + def apply_coords(self, coords): + # Transform image center from source coordinates into output coordinates + # and then map the new origin to the corner of the output image. + h, w = self.output_size + x0, y0, x1, y1 = self.src_rect + new_coords = coords.astype(np.float32) + new_coords[:, 0] -= 0.5 * (x0 + x1) + new_coords[:, 1] -= 0.5 * (y0 + y1) + new_coords[:, 0] *= w / (x1 - x0) + new_coords[:, 1] *= h / (y1 - y0) + new_coords[:, 0] += 0.5 * w + new_coords[:, 1] += 0.5 * h + return new_coords + + def apply_segmentation(self, segmentation): + segmentation = self.apply_image(segmentation, interp=Image.NEAREST) + return segmentation + + +class ResizeTransform(Transform): + """ + Resize the image to a target size. + """ + + def __init__(self, h, w, new_h, new_w, interp=None): + """ + Args: + h, w (int): original image size + new_h, new_w (int): new image size + interp: PIL interpolation methods, defaults to bilinear. + """ + # TODO decide on PIL vs opencv + super().__init__() + if interp is None: + interp = Image.BILINEAR + self._set_attributes(locals()) + + def apply_image(self, img, interp=None): + assert img.shape[:2] == (self.h, self.w) + assert len(img.shape) <= 4 + + if img.dtype == np.uint8: + pil_image = Image.fromarray(img) + interp_method = interp if interp is not None else self.interp + pil_image = pil_image.resize((self.new_w, self.new_h), interp_method) + ret = np.asarray(pil_image) + else: + # PIL only supports uint8 + img = torch.from_numpy(img) + shape = list(img.shape) + shape_4d = shape[:2] + [1] * (4 - len(shape)) + shape[2:] + img = img.view(shape_4d).permute(2, 3, 0, 1) # hw(c) -> nchw + _PIL_RESIZE_TO_INTERPOLATE_MODE = {Image.BILINEAR: "bilinear", Image.BICUBIC: "bicubic"} + mode = _PIL_RESIZE_TO_INTERPOLATE_MODE[self.interp] + img = F.interpolate(img, (self.new_h, self.new_w), mode=mode, align_corners=False) + shape[:2] = (self.new_h, self.new_w) + ret = img.permute(2, 3, 0, 1).view(shape).numpy() # nchw -> hw(c) + + return ret + + def apply_coords(self, coords): + coords[:, 0] = coords[:, 0] * (self.new_w * 1.0 / self.w) + coords[:, 1] = coords[:, 1] * (self.new_h * 1.0 / self.h) + return coords + + def apply_segmentation(self, segmentation): + segmentation = self.apply_image(segmentation, interp=Image.NEAREST) + return segmentation + + def inverse(self): + return ResizeTransform(self.new_h, self.new_w, self.h, self.w, self.interp) + + +class RotationTransform(Transform): + """ + This method returns a copy of this image, rotated the given + number of degrees counter clockwise around its center. 
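# A small numeric sketch (not part of the original patch) of the coordinate
# scaling performed by ResizeTransform.apply_coords above; the sizes and the
# point are hypothetical.
import numpy as np

h, w, new_h, new_w = 200, 400, 100, 200     # downscale a 200x400 image by 2
coords = np.array([[100.0, 50.0]])          # one (x, y) point
coords[:, 0] *= new_w / w
coords[:, 1] *= new_h / h
# coords == [[50., 25.]]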
+ """ + + def __init__(self, h, w, angle, expand=True, center=None, interp=None): + """ + Args: + h, w (int): original image size + angle (float): degrees for rotation + expand (bool): choose if the image should be resized to fit the whole + rotated image (default), or simply cropped + center (tuple (width, height)): coordinates of the rotation center + if left to None, the center will be fit to the center of each image + center has no effect if expand=True because it only affects shifting + interp: cv2 interpolation method, default cv2.INTER_LINEAR + """ + super().__init__() + image_center = np.array((w / 2, h / 2)) + if center is None: + center = image_center + if interp is None: + interp = cv2.INTER_LINEAR + abs_cos, abs_sin = abs(np.cos(np.deg2rad(angle))), abs(np.sin(np.deg2rad(angle))) + if expand: + # find the new width and height bounds + bound_w, bound_h = np.rint( + [h * abs_sin + w * abs_cos, h * abs_cos + w * abs_sin] + ).astype(int) + else: + bound_w, bound_h = w, h + + self._set_attributes(locals()) + self.rm_coords = self.create_rotation_matrix() + # Needed because of this problem https://github.com/opencv/opencv/issues/11784 + self.rm_image = self.create_rotation_matrix(offset=-0.5) + + def apply_image(self, img, interp=None): + """ + demo should be a numpy array, formatted as Height * Width * Nchannels + """ + if len(img) == 0 or self.angle % 360 == 0: + return img + assert img.shape[:2] == (self.h, self.w) + interp = interp if interp is not None else self.interp + return cv2.warpAffine(img, self.rm_image, (self.bound_w, self.bound_h), flags=interp) + + def apply_coords(self, coords): + """ + coords should be a N * 2 array-like, containing N couples of (x, y) points + """ + coords = np.asarray(coords, dtype=float) + if len(coords) == 0 or self.angle % 360 == 0: + return coords + return cv2.transform(coords[:, np.newaxis, :], self.rm_coords)[:, 0, :] + + def apply_segmentation(self, segmentation): + segmentation = self.apply_image(segmentation, interp=cv2.INTER_NEAREST) + return segmentation + + def create_rotation_matrix(self, offset=0): + center = (self.center[0] + offset, self.center[1] + offset) + rm = cv2.getRotationMatrix2D(tuple(center), self.angle, 1) + if self.expand: + # Find the coordinates of the center of rotation in the new image + # The only point for which we know the future coordinates is the center of the image + rot_im_center = cv2.transform(self.image_center[None, None, :] + offset, rm)[0, 0, :] + new_center = np.array([self.bound_w / 2, self.bound_h / 2]) + offset - rot_im_center + # shift the rotation center to the new coordinates + rm[:, 2] += new_center + return rm + + +def HFlip_rotated_box(transform, rotated_boxes): + """ + Apply the horizontal flip transform on rotated boxes. + + Args: + rotated_boxes (ndarray): Nx5 floating point array of + (x_center, y_center, width, height, angle_degrees) format + in absolute coordinates. + """ + # Transform x_center + rotated_boxes[:, 0] = transform.width - rotated_boxes[:, 0] + # Transform angle + rotated_boxes[:, 4] = -rotated_boxes[:, 4] + return rotated_boxes + + +def Resize_rotated_box(transform, rotated_boxes): + """ + Apply the resizing transform on rotated boxes. For details of how these (approximation) + formulas are derived, please refer to :meth:`RotatedBoxes.scale`. + + Args: + rotated_boxes (ndarray): Nx5 floating point array of + (x_center, y_center, width, height, angle_degrees) format + in absolute coordinates. 
+ """ + scale_factor_x = transform.new_w * 1.0 / transform.w + scale_factor_y = transform.new_h * 1.0 / transform.h + rotated_boxes[:, 0] *= scale_factor_x + rotated_boxes[:, 1] *= scale_factor_y + theta = rotated_boxes[:, 4] * np.pi / 180.0 + c = np.cos(theta) + s = np.sin(theta) + rotated_boxes[:, 2] *= np.sqrt(np.square(scale_factor_x * c) + np.square(scale_factor_y * s)) + rotated_boxes[:, 3] *= np.sqrt(np.square(scale_factor_x * s) + np.square(scale_factor_y * c)) + rotated_boxes[:, 4] = np.arctan2(scale_factor_x * s, scale_factor_y * c) * 180 / np.pi + + return rotated_boxes + + +HFlipTransform.register_type("rotated_box", HFlip_rotated_box) +NoOpTransform.register_type("rotated_box", lambda t, x: x) +ResizeTransform.register_type("rotated_box", Resize_rotated_box) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/transform_gen.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/transform_gen.py new file mode 100644 index 0000000000000000000000000000000000000000..197a0ebf6750a7ea459aa7e14413b4a41adcd42e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/data/transforms/transform_gen.py @@ -0,0 +1,534 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# File: transformer.py + +import inspect +import numpy as np +import pprint +import sys +from abc import ABCMeta, abstractmethod +from fvcore.transforms.transform import ( + BlendTransform, + CropTransform, + HFlipTransform, + NoOpTransform, + Transform, + TransformList, + VFlipTransform, +) +from PIL import Image + +from .transform import ExtentTransform, ResizeTransform, RotationTransform + +__all__ = [ + "RandomApply", + "RandomBrightness", + "RandomContrast", + "RandomCrop", + "RandomExtent", + "RandomFlip", + "RandomSaturation", + "RandomLighting", + "RandomRotation", + "Resize", + "ResizeShortestEdge", + "TransformGen", + "apply_transform_gens", +] + + +def check_dtype(img): + assert isinstance(img, np.ndarray), "[TransformGen] Needs an numpy array, but got a {}!".format( + type(img) + ) + assert not isinstance(img.dtype, np.integer) or ( + img.dtype == np.uint8 + ), "[TransformGen] Got image of type {}, use uint8 or floating points instead!".format( + img.dtype + ) + assert img.ndim in [2, 3], img.ndim + + +class TransformGen(metaclass=ABCMeta): + """ + TransformGen takes an image of type uint8 in range [0, 255], or + floating point in range [0, 1] or [0, 255] as input. + + It creates a :class:`Transform` based on the given image, sometimes with randomness. + The transform can then be used to transform images + or other data (boxes, points, annotations, etc.) associated with it. + + The assumption made in this class + is that the image itself is sufficient to instantiate a transform. + When this assumption is not true, you need to create the transforms by your own. + + A list of `TransformGen` can be applied with :func:`apply_transform_gens`. + """ + + def _init(self, params=None): + if params: + for k, v in params.items(): + if k != "self" and not k.startswith("_"): + setattr(self, k, v) + + @abstractmethod + def get_transform(self, img): + pass + + def _rand_range(self, low=1.0, high=None, size=None): + """ + Uniform float random number between low and high. 
+ """ + if high is None: + low, high = 0, low + if size is None: + size = [] + return np.random.uniform(low, high, size) + + def __repr__(self): + """ + Produce something like: + "MyTransformGen(field1={self.field1}, field2={self.field2})" + """ + try: + sig = inspect.signature(self.__init__) + classname = type(self).__name__ + argstr = [] + for name, param in sig.parameters.items(): + assert ( + param.kind != param.VAR_POSITIONAL and param.kind != param.VAR_KEYWORD + ), "The default __repr__ doesn't support *args or **kwargs" + assert hasattr(self, name), ( + "Attribute {} not found! " + "Default __repr__ only works if attributes match the constructor.".format(name) + ) + attr = getattr(self, name) + default = param.default + if default is attr: + continue + argstr.append("{}={}".format(name, pprint.pformat(attr))) + return "{}({})".format(classname, ", ".join(argstr)) + except AssertionError: + return super().__repr__() + + __str__ = __repr__ + + +class RandomApply(TransformGen): + """ + Randomly apply the wrapper transformation with a given probability. + """ + + def __init__(self, transform, prob=0.5): + """ + Args: + transform (Transform, TransformGen): the transform to be wrapped + by the `RandomApply`. The `transform` can either be a + `Transform` or `TransformGen` instance. + prob (float): probability between 0.0 and 1.0 that + the wrapper transformation is applied + """ + super().__init__() + assert isinstance(transform, (Transform, TransformGen)), ( + f"The given transform must either be a Transform or TransformGen instance. " + f"Not {type(transform)}" + ) + assert 0.0 <= prob <= 1.0, f"Probablity must be between 0.0 and 1.0 (given: {prob})" + self.prob = prob + self.transform = transform + + def get_transform(self, img): + do = self._rand_range() < self.prob + if do: + if isinstance(self.transform, TransformGen): + return self.transform.get_transform(img) + else: + return self.transform + else: + return NoOpTransform() + + +class RandomFlip(TransformGen): + """ + Flip the image horizontally or vertically with the given probability. + """ + + def __init__(self, prob=0.5, *, horizontal=True, vertical=False): + """ + Args: + prob (float): probability of flip. + horizontal (boolean): whether to apply horizontal flipping + vertical (boolean): whether to apply vertical flipping + """ + super().__init__() + + if horizontal and vertical: + raise ValueError("Cannot do both horiz and vert. Please use two Flip instead.") + if not horizontal and not vertical: + raise ValueError("At least one of horiz or vert has to be True!") + self._init(locals()) + + def get_transform(self, img): + h, w = img.shape[:2] + do = self._rand_range() < self.prob + if do: + if self.horizontal: + return HFlipTransform(w) + elif self.vertical: + return VFlipTransform(h) + else: + return NoOpTransform() + + +class Resize(TransformGen): + """ Resize image to a target size""" + + def __init__(self, shape, interp=Image.BILINEAR): + """ + Args: + shape: (h, w) tuple or a int + interp: PIL interpolation method + """ + if isinstance(shape, int): + shape = (shape, shape) + shape = tuple(shape) + self._init(locals()) + + def get_transform(self, img): + return ResizeTransform( + img.shape[0], img.shape[1], self.shape[0], self.shape[1], self.interp + ) + + +class ResizeShortestEdge(TransformGen): + """ + Scale the shorter edge to the given size, with a limit of `max_size` on the longer edge. + If `max_size` is reached, then downscale so that the longer edge does not exceed max_size. 
+ """ + + def __init__( + self, short_edge_length, max_size=sys.maxsize, sample_style="range", interp=Image.BILINEAR + ): + """ + Args: + short_edge_length (list[int]): If ``sample_style=="range"``, + a [min, max] interval from which to sample the shortest edge length. + If ``sample_style=="choice"``, a list of shortest edge lengths to sample from. + max_size (int): maximum allowed longest edge length. + sample_style (str): either "range" or "choice". + """ + super().__init__() + assert sample_style in ["range", "choice"], sample_style + + self.is_range = sample_style == "range" + if isinstance(short_edge_length, int): + short_edge_length = (short_edge_length, short_edge_length) + self._init(locals()) + + def get_transform(self, img): + h, w = img.shape[:2] + + if self.is_range: + size = np.random.randint(self.short_edge_length[0], self.short_edge_length[1] + 1) + else: + size = np.random.choice(self.short_edge_length) + if size == 0: + return NoOpTransform() + + scale = size * 1.0 / min(h, w) + if h < w: + newh, neww = size, scale * w + else: + newh, neww = scale * h, size + if max(newh, neww) > self.max_size: + scale = self.max_size * 1.0 / max(newh, neww) + newh = newh * scale + neww = neww * scale + neww = int(neww + 0.5) + newh = int(newh + 0.5) + return ResizeTransform(h, w, newh, neww, self.interp) + + +class RandomRotation(TransformGen): + """ + This method returns a copy of this image, rotated the given + number of degrees counter clockwise around the given center. + """ + + def __init__(self, angle, expand=True, center=None, sample_style="range", interp=None): + """ + Args: + angle (list[float]): If ``sample_style=="range"``, + a [min, max] interval from which to sample the angle (in degrees). + If ``sample_style=="choice"``, a list of angles to sample from + expand (bool): choose if the image should be resized to fit the whole + rotated image (default), or simply cropped + center (list[[float, float]]): If ``sample_style=="range"``, + a [[minx, miny], [maxx, maxy]] relative interval from which to sample the center, + [0, 0] being the top left of the image and [1, 1] the bottom right. + If ``sample_style=="choice"``, a list of centers to sample from + Default: None, which means that the center of rotation is the center of the image + center has no effect if expand=True because it only affects shifting + """ + super().__init__() + assert sample_style in ["range", "choice"], sample_style + self.is_range = sample_style == "range" + if isinstance(angle, (float, int)): + angle = (angle, angle) + if center is not None and isinstance(center[0], (float, int)): + center = (center, center) + self._init(locals()) + + def get_transform(self, img): + h, w = img.shape[:2] + center = None + if self.is_range: + angle = np.random.uniform(self.angle[0], self.angle[1]) + if self.center is not None: + center = ( + np.random.uniform(self.center[0][0], self.center[1][0]), + np.random.uniform(self.center[0][1], self.center[1][1]), + ) + else: + angle = np.random.choice(self.angle) + if self.center is not None: + center = np.random.choice(self.center) + + if center is not None: + center = (w * center[0], h * center[1]) # Convert to absolute coordinates + + return RotationTransform(h, w, angle, expand=self.expand, center=center, interp=self.interp) + + +class RandomCrop(TransformGen): + """ + Randomly crop a subimage out of an image. + """ + + def __init__(self, crop_type: str, crop_size): + """ + Args: + crop_type (str): one of "relative_range", "relative", "absolute". 
+ See `config/defaults.py` for explanation. + crop_size (tuple[float]): the relative ratio or absolute pixels of + height and width + """ + super().__init__() + assert crop_type in ["relative_range", "relative", "absolute"] + self._init(locals()) + + def get_transform(self, img): + h, w = img.shape[:2] + croph, cropw = self.get_crop_size((h, w)) + assert h >= croph and w >= cropw, "Shape computation in {} has bugs.".format(self) + h0 = np.random.randint(h - croph + 1) + w0 = np.random.randint(w - cropw + 1) + return CropTransform(w0, h0, cropw, croph) + + def get_crop_size(self, image_size): + """ + Args: + image_size (tuple): height, width + + Returns: + crop_size (tuple): height, width in absolute pixels + """ + h, w = image_size + if self.crop_type == "relative": + ch, cw = self.crop_size + return int(h * ch + 0.5), int(w * cw + 0.5) + elif self.crop_type == "relative_range": + crop_size = np.asarray(self.crop_size, dtype=np.float32) + ch, cw = crop_size + np.random.rand(2) * (1 - crop_size) + return int(h * ch + 0.5), int(w * cw + 0.5) + elif self.crop_type == "absolute": + return (min(self.crop_size[0], h), min(self.crop_size[1], w)) + else: + NotImplementedError("Unknown crop type {}".format(self.crop_type)) + + +class RandomExtent(TransformGen): + """ + Outputs an image by cropping a random "subrect" of the source image. + + The subrect can be parameterized to include pixels outside the source image, + in which case they will be set to zeros (i.e. black). The size of the output + image will vary with the size of the random subrect. + """ + + def __init__(self, scale_range, shift_range): + """ + Args: + output_size (h, w): Dimensions of output image + scale_range (l, h): Range of input-to-output size scaling factor + shift_range (x, y): Range of shifts of the cropped subrect. The rect + is shifted by [w / 2 * Uniform(-x, x), h / 2 * Uniform(-y, y)], + where (w, h) is the (width, height) of the input image. Set each + component to zero to crop at the image's center. + """ + super().__init__() + self._init(locals()) + + def get_transform(self, img): + img_h, img_w = img.shape[:2] + + # Initialize src_rect to fit the input image. + src_rect = np.array([-0.5 * img_w, -0.5 * img_h, 0.5 * img_w, 0.5 * img_h]) + + # Apply a random scaling to the src_rect. + src_rect *= np.random.uniform(self.scale_range[0], self.scale_range[1]) + + # Apply a random shift to the coordinates origin. + src_rect[0::2] += self.shift_range[0] * img_w * (np.random.rand() - 0.5) + src_rect[1::2] += self.shift_range[1] * img_h * (np.random.rand() - 0.5) + + # Map src_rect coordinates into image coordinates (center at corner). + src_rect[0::2] += 0.5 * img_w + src_rect[1::2] += 0.5 * img_h + + return ExtentTransform( + src_rect=(src_rect[0], src_rect[1], src_rect[2], src_rect[3]), + output_size=(int(src_rect[3] - src_rect[1]), int(src_rect[2] - src_rect[0])), + ) + + +class RandomContrast(TransformGen): + """ + Randomly transforms image contrast. + + Contrast intensity is uniformly sampled in (intensity_min, intensity_max). 
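# A small sketch (not part of the original patch) of how get_crop_size above
# interprets crop_size on a hypothetical 480x640 image:
#   "relative":       (0.5, 0.5) -> (240, 320), a fixed fraction of (H, W)
#   "relative_range": (0.5, 0.5) -> per-axis fraction sampled from [0.5, 1.0]
#   "absolute":       (300, 300) -> (300, 300), clamped to the image size
crop = RandomCrop("relative", (0.5, 0.5))
print(crop.get_crop_size((480, 640)))   # (240, 320)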
+ - intensity < 1 will reduce contrast + - intensity = 1 will preserve the input image + - intensity > 1 will increase contrast + + See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html + """ + + def __init__(self, intensity_min, intensity_max): + """ + Args: + intensity_min (float): Minimum augmentation + intensity_max (float): Maximum augmentation + """ + super().__init__() + self._init(locals()) + + def get_transform(self, img): + w = np.random.uniform(self.intensity_min, self.intensity_max) + return BlendTransform(src_image=img.mean(), src_weight=1 - w, dst_weight=w) + + +class RandomBrightness(TransformGen): + """ + Randomly transforms image brightness. + + Brightness intensity is uniformly sampled in (intensity_min, intensity_max). + - intensity < 1 will reduce brightness + - intensity = 1 will preserve the input image + - intensity > 1 will increase brightness + + See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html + """ + + def __init__(self, intensity_min, intensity_max): + """ + Args: + intensity_min (float): Minimum augmentation + intensity_max (float): Maximum augmentation + """ + super().__init__() + self._init(locals()) + + def get_transform(self, img): + w = np.random.uniform(self.intensity_min, self.intensity_max) + return BlendTransform(src_image=0, src_weight=1 - w, dst_weight=w) + + +class RandomSaturation(TransformGen): + """ + Randomly transforms image saturation. + + Saturation intensity is uniformly sampled in (intensity_min, intensity_max). + - intensity < 1 will reduce saturation (make the image more grayscale) + - intensity = 1 will preserve the input image + - intensity > 1 will increase saturation + + See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html + """ + + def __init__(self, intensity_min, intensity_max): + """ + Args: + intensity_min (float): Minimum augmentation (1 preserves input). + intensity_max (float): Maximum augmentation (1 preserves input). + """ + super().__init__() + self._init(locals()) + + def get_transform(self, img): + assert img.shape[-1] == 3, "Saturation only works on RGB images" + w = np.random.uniform(self.intensity_min, self.intensity_max) + grayscale = img.dot([0.299, 0.587, 0.114])[:, :, np.newaxis] + return BlendTransform(src_image=grayscale, src_weight=1 - w, dst_weight=w) + + +class RandomLighting(TransformGen): + """ + Randomly transforms image color using fixed PCA over ImageNet. + + The degree of color jittering is randomly sampled via a normal distribution, + with standard deviation given by the scale parameter. + """ + + def __init__(self, scale): + """ + Args: + scale (float): Standard deviation of principal component weighting. + """ + super().__init__() + self._init(locals()) + self.eigen_vecs = np.array( + [[-0.5675, 0.7192, 0.4009], [-0.5808, -0.0045, -0.8140], [-0.5836, -0.6948, 0.4203]] + ) + self.eigen_vals = np.array([0.2175, 0.0188, 0.0045]) + + def get_transform(self, img): + assert img.shape[-1] == 3, "Saturation only works on RGB images" + weights = np.random.normal(scale=self.scale, size=3) + return BlendTransform( + src_image=self.eigen_vecs.dot(weights * self.eigen_vals), src_weight=1.0, dst_weight=1.0 + ) + + +def apply_transform_gens(transform_gens, img): + """ + Apply a list of :class:`TransformGen` or :class:`Transform` on the input image, and + returns the transformed image and a list of transforms. 
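# A numeric sketch (not part of the original patch) of the BlendTransform used
# by the color jitters above: output = src_weight * src_image + dst_weight * img.
# For RandomContrast with sampled weight w this is (1 - w) * img.mean() + w * img,
# so w == 1 leaves the image untouched. The values below are hypothetical.
import numpy as np

img = np.array([[0.0, 100.0, 200.0]])    # mean == 100
w = 1.5                                  # sampled contrast intensity
out = (1 - w) * img.mean() + w * img
# out == [[-50., 100., 250.]] -> contrast increased around the mean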
+ + We cannot simply create and return all transforms without + applying it to the image, because a subsequent transform may + need the output of the previous one. + + Args: + transform_gens (list): list of :class:`TransformGen` or :class:`Transform` instance to + be applied. + img (ndarray): uint8 or floating point images with 1 or 3 channels. + + Returns: + ndarray: the transformed image + TransformList: contain the transforms that's used. + """ + for g in transform_gens: + assert isinstance(g, (Transform, TransformGen)), g + + check_dtype(img) + + tfms = [] + for g in transform_gens: + tfm = g.get_transform(img) if isinstance(g, TransformGen) else g + assert isinstance( + tfm, Transform + ), "TransformGen {} must return an instance of Transform! Got {} instead".format(g, tfm) + img = tfm.apply_image(img) + tfms.append(tfm) + return img, TransformList(tfms) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6a4538da3e66593e4ef8916cd9cbca3c83b8c14e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +from .launch import * +from .train_loop import * + +__all__ = [k for k in globals().keys() if not k.startswith("_")] + + +# prefer to let hooks and defaults live in separate namespaces (therefore not in __all__) +# but still make them available here +from .hooks import * +from .defaults import * diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/defaults.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..db9ab68f21d77b9e3be730c4784abe665df3d96a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/defaults.py @@ -0,0 +1,531 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +This file contains components with some default boilerplate logic user may need +in training / testing. They will not work for everyone, but many users may find them useful. + +The behavior of functions/classes in this file is subject to change, +since they are meant to represent the "common default behavior" people need in their projects. 
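# A minimal pipeline sketch (not part of the original patch) for
# apply_transform_gens above, with hypothetical sizes; the returned
# TransformList can be replayed on other data (e.g. a mask) so labels stay
# aligned with the transformed image.
import numpy as np

gens = [ResizeShortestEdge((640, 672), max_size=1333), RandomFlip(prob=0.5)]
img = np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8)
mask = np.zeros((480, 640), dtype=np.uint8)

img, tfms = apply_transform_gens(gens, img)
mask = tfms.apply_segmentation(mask)    # replay the same transforms on the mask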
+""" + +import argparse +import logging +import os +import sys +from collections import OrderedDict +import torch +from fvcore.common.file_io import PathManager +from fvcore.nn.precise_bn import get_bn_modules +from torch.nn.parallel import DistributedDataParallel + +import detectron2.data.transforms as T +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.data import ( + MetadataCatalog, + build_detection_test_loader, + build_detection_train_loader, +) +from detectron2.evaluation import ( + DatasetEvaluator, + inference_on_dataset, + print_csv_format, + verify_results, +) +from detectron2.modeling import build_model +from detectron2.solver import build_lr_scheduler, build_optimizer +from detectron2.utils import comm +from detectron2.utils.collect_env import collect_env_info +from detectron2.utils.env import seed_all_rng +from detectron2.utils.events import CommonMetricPrinter, JSONWriter, TensorboardXWriter +from detectron2.utils.logger import setup_logger + +from . import hooks +from .train_loop import SimpleTrainer + +__all__ = ["default_argument_parser", "default_setup", "DefaultPredictor", "DefaultTrainer"] + + +def default_argument_parser(epilog=None): + """ + Create a parser with some common arguments used by detectron2 users. + + Args: + epilog (str): epilog passed to ArgumentParser describing the usage. + + Returns: + argparse.ArgumentParser: + """ + parser = argparse.ArgumentParser( + epilog=epilog + or f""" +Examples: + +Run on single machine: + $ {sys.argv[0]} --num-gpus 8 --config-file cfg.yaml MODEL.WEIGHTS /path/to/weight.pth + +Run on multiple machines: + (machine0)$ {sys.argv[0]} --machine-rank 0 --num-machines 2 --dist-url [--other-flags] + (machine1)$ {sys.argv[0]} --machine-rank 1 --num-machines 2 --dist-url [--other-flags] +""", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file") + parser.add_argument( + "--resume", + action="store_true", + help="whether to attempt to resume from the checkpoint directory", + ) + parser.add_argument("--eval-only", action="store_true", help="perform evaluation only") + parser.add_argument("--num-gpus", type=int, default=1, help="number of gpus *per machine*") + parser.add_argument("--num-machines", type=int, default=1, help="total number of machines") + parser.add_argument( + "--machine-rank", type=int, default=0, help="the rank of this machine (unique per machine)" + ) + + # PyTorch still may leave orphan processes in multi-gpu training. + # Therefore we use a deterministic way to obtain port, + # so that users are aware of orphan processes by seeing the port occupied. + port = 2 ** 15 + 2 ** 14 + hash(os.getuid() if sys.platform != "win32" else 1) % 2 ** 14 + parser.add_argument( + "--dist-url", + default="tcp://127.0.0.1:{}".format(port), + help="initialization URL for pytorch distributed backend. See " + "https://pytorch.org/docs/stable/distributed.html for details.", + ) + parser.add_argument( + "opts", + help="Modify config options using the command-line", + default=None, + nargs=argparse.REMAINDER, + ) + return parser + + +def default_setup(cfg, args): + """ + Perform some basic common setups at the beginning of a job, including: + + 1. Set up the detectron2 logger + 2. Log basic information about environment, cmdline arguments, and config + 3. 
Backup the config to the output directory + + Args: + cfg (CfgNode): the full config to be used + args (argparse.NameSpace): the command line arguments to be logged + """ + output_dir = cfg.OUTPUT_DIR + if comm.is_main_process() and output_dir: + PathManager.mkdirs(output_dir) + + rank = comm.get_rank() + setup_logger(output_dir, distributed_rank=rank, name="fvcore") + logger = setup_logger(output_dir, distributed_rank=rank) + + logger.info("Rank of current process: {}. World size: {}".format(rank, comm.get_world_size())) + logger.info("Environment info:\n" + collect_env_info()) + + logger.info("Command line arguments: " + str(args)) + if hasattr(args, "config_file") and args.config_file != "": + logger.info( + "Contents of args.config_file={}:\n{}".format( + args.config_file, PathManager.open(args.config_file, "r").read() + ) + ) + + logger.info("Running with full config:\n{}".format(cfg)) + if comm.is_main_process() and output_dir: + # Note: some of our scripts may expect the existence of + # config.yaml in output directory + path = os.path.join(output_dir, "config.yaml") + with PathManager.open(path, "w") as f: + f.write(cfg.dump()) + logger.info("Full config saved to {}".format(path)) + + # make sure each worker has a different, yet deterministic seed if specified + seed_all_rng(None if cfg.SEED < 0 else cfg.SEED + rank) + + # cudnn benchmark has large overhead. It shouldn't be used considering the small size of + # typical validation set. + if not (hasattr(args, "eval_only") and args.eval_only): + torch.backends.cudnn.benchmark = cfg.CUDNN_BENCHMARK + + +class DefaultPredictor: + """ + Create a simple end-to-end predictor with the given config that runs on + single device for a single input image. + + Compared to using the model directly, this class does the following additions: + + 1. Load checkpoint from `cfg.MODEL.WEIGHTS`. + 2. Always take BGR image as the input and apply conversion defined by `cfg.INPUT.FORMAT`. + 3. Apply resizing defined by `cfg.INPUT.{MIN,MAX}_SIZE_TEST`. + 4. Take one input image and produce a single output, instead of a batch. + + If you'd like to do anything more fancy, please refer to its source code + as examples to build and use the model manually. + + Attributes: + metadata (Metadata): the metadata of the underlying dataset, obtained from + cfg.DATASETS.TEST. + + Examples: + + .. code-block:: python + + pred = DefaultPredictor(cfg) + inputs = cv2.imread("input.jpg") + outputs = pred(inputs) + """ + + def __init__(self, cfg): + self.cfg = cfg.clone() # cfg can be modified by model + self.model = build_model(self.cfg) + self.model.eval() + self.metadata = MetadataCatalog.get(cfg.DATASETS.TEST[0]) + + checkpointer = DetectionCheckpointer(self.model) + checkpointer.load(cfg.MODEL.WEIGHTS) + + self.transform_gen = T.ResizeShortestEdge( + [cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST + ) + + self.input_format = cfg.INPUT.FORMAT + assert self.input_format in ["RGB", "BGR"], self.input_format + + def __call__(self, original_image): + """ + Args: + original_image (np.ndarray): an image of shape (H, W, C) (in BGR order). + + Returns: + predictions (dict): + the output of the model for one image only. + See :doc:`/tutorials/models` for details about the format. + """ + with torch.no_grad(): # https://github.com/sphinx-doc/sphinx/issues/4258 + # Apply pre-processing to image. 
+ if self.input_format == "RGB": + # whether the model expects BGR inputs or RGB + original_image = original_image[:, :, ::-1] + height, width = original_image.shape[:2] + image = self.transform_gen.get_transform(original_image).apply_image(original_image) + image = torch.as_tensor(image.astype("float32").transpose(2, 0, 1)) + + inputs = {"image": image, "height": height, "width": width} + predictions = self.model([inputs])[0] + return predictions + + +class DefaultTrainer(SimpleTrainer): + """ + A trainer with default training logic. Compared to `SimpleTrainer`, it + contains the following logic in addition: + + 1. Create model, optimizer, scheduler, dataloader from the given config. + 2. Load a checkpoint or `cfg.MODEL.WEIGHTS`, if exists, when + `resume_or_load` is called. + 3. Register a few common hooks. + + It is created to simplify the **standard model training workflow** and reduce code boilerplate + for users who only need the standard training workflow, with standard features. + It means this class makes *many assumptions* about your training logic that + may easily become invalid in a new research. In fact, any assumptions beyond those made in the + :class:`SimpleTrainer` are too much for research. + + The code of this class has been annotated about restrictive assumptions it mades. + When they do not work for you, you're encouraged to: + + 1. Overwrite methods of this class, OR: + 2. Use :class:`SimpleTrainer`, which only does minimal SGD training and + nothing else. You can then add your own hooks if needed. OR: + 3. Write your own training loop similar to `tools/plain_train_net.py`. + + Also note that the behavior of this class, like other functions/classes in + this file, is not stable, since it is meant to represent the "common default behavior". + It is only guaranteed to work well with the standard models and training workflow in detectron2. + To obtain more stable behavior, write your own training logic with other public APIs. + + Examples: + + .. code-block:: python + + trainer = DefaultTrainer(cfg) + trainer.resume_or_load() # load last checkpoint or MODEL.WEIGHTS + trainer.train() + + Attributes: + scheduler: + checkpointer (DetectionCheckpointer): + cfg (CfgNode): + """ + + def __init__(self, cfg): + """ + Args: + cfg (CfgNode): + """ + logger = logging.getLogger("detectron2") + if not logger.isEnabledFor(logging.INFO): # setup_logger is not called for d2 + setup_logger() + # Assume these objects must be constructed in this order. + model = self.build_model(cfg) + optimizer = self.build_optimizer(cfg, model) + data_loader = self.build_train_loader(cfg) + + # For training, wrap with DDP. But don't need this for inference. + if comm.get_world_size() > 1: + model = DistributedDataParallel( + model, device_ids=[comm.get_local_rank()], broadcast_buffers=False + ) + super().__init__(model, data_loader, optimizer) + + self.scheduler = self.build_lr_scheduler(cfg, optimizer) + # Assume no other objects need to be checkpointed. + # We can later make it checkpoint the stateful hooks + self.checkpointer = DetectionCheckpointer( + # Assume you want to save checkpoints together with logs/statistics + model, + cfg.OUTPUT_DIR, + optimizer=optimizer, + scheduler=self.scheduler, + ) + self.start_iter = 0 + self.max_iter = cfg.SOLVER.MAX_ITER + self.cfg = cfg + + self.register_hooks(self.build_hooks()) + + def resume_or_load(self, resume=True): + """ + If `resume==True`, and last checkpoint exists, resume from it and load all + checkpointables (eg. optimizer and scheduler). 
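# A minimal sketch (not part of the original patch) of the standard workflow
# DefaultTrainer is meant to cover; the config path is hypothetical, and a real
# script would also register its datasets and set cfg.DATASETS / cfg.OUTPUT_DIR.
from detectron2.config import get_cfg

cfg = get_cfg()
cfg.merge_from_file("configs/my_experiment.yaml")   # hypothetical config file
trainer = DefaultTrainer(cfg)
trainer.resume_or_load(resume=False)
trainer.train()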
+ + Otherwise, load the model specified by the config (skip all checkpointables). + + Args: + resume (bool): whether to do resume or not + """ + checkpoint = self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume) + self.start_iter = checkpoint.get("iteration", -1) if resume else -1 + # The checkpoint stores the training iteration that just finished, thus we start + # at the next iteration (or iter zero if there's no checkpoint). + self.start_iter += 1 + + def build_hooks(self): + """ + Build a list of default hooks, including timing, evaluation, + checkpointing, lr scheduling, precise BN, writing events. + + Returns: + list[HookBase]: + """ + cfg = self.cfg.clone() + cfg.defrost() + cfg.DATALOADER.NUM_WORKERS = 0 # save some memory and time for PreciseBN + + ret = [ + hooks.IterationTimer(), + hooks.LRScheduler(self.optimizer, self.scheduler), + hooks.PreciseBN( + # Run at the same freq as (but before) evaluation. + cfg.TEST.EVAL_PERIOD, + self.model, + # Build a new data loader to not affect training + self.build_train_loader(cfg), + cfg.TEST.PRECISE_BN.NUM_ITER, + ) + if cfg.TEST.PRECISE_BN.ENABLED and get_bn_modules(self.model) + else None, + ] + + # Do PreciseBN before checkpointer, because it updates the model and need to + # be saved by checkpointer. + # This is not always the best: if checkpointing has a different frequency, + # some checkpoints may have more precise statistics than others. + if comm.is_main_process(): + ret.append(hooks.PeriodicCheckpointer(self.checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD)) + + def test_and_save_results(): + self._last_eval_results = self.test(self.cfg, self.model) + return self._last_eval_results + + # Do evaluation after checkpointer, because then if it fails, + # we can use the saved checkpoint to debug. + ret.append(hooks.EvalHook(cfg.TEST.EVAL_PERIOD, test_and_save_results)) + + if comm.is_main_process(): + # run writers in the end, so that evaluation metrics are written + ret.append(hooks.PeriodicWriter(self.build_writers(), period=20)) + return ret + + def build_writers(self): + """ + Build a list of writers to be used. By default it contains + writers that write metrics to the screen, + a json file, and a tensorboard event file respectively. + If you'd like a different list of writers, you can overwrite it in + your trainer. + + Returns: + list[EventWriter]: a list of :class:`EventWriter` objects. + + It is now implemented by: + + .. code-block:: python + + return [ + CommonMetricPrinter(self.max_iter), + JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, "metrics.json")), + TensorboardXWriter(self.cfg.OUTPUT_DIR), + ] + + """ + # Here the default print/log frequency of each writer is used. + return [ + # It may not always print what you want to see, since it prints "common" metrics only. + CommonMetricPrinter(self.max_iter), + JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, "metrics.json")), + TensorboardXWriter(self.cfg.OUTPUT_DIR), + ] + + def train(self): + """ + Run training. + + Returns: + OrderedDict of results, if evaluation is enabled. Otherwise None. + """ + super().train(self.start_iter, self.max_iter) + if len(self.cfg.TEST.EXPECTED_RESULTS) and comm.is_main_process(): + assert hasattr( + self, "_last_eval_results" + ), "No evaluation results obtained during training!" + verify_results(self.cfg, self._last_eval_results) + return self._last_eval_results + + @classmethod + def build_model(cls, cfg): + """ + Returns: + torch.nn.Module: + + It now calls :func:`detectron2.modeling.build_model`. 
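# A sketch (not part of the original patch) of the intended customization
# pattern: subclass DefaultTrainer and override the build_* classmethods, e.g.
# to supply the evaluator used by the EvalHook registered in build_hooks above.
# The evaluator choice and its argument order follow common detectron2 usage
# and are an assumption here.
from detectron2.evaluation import COCOEvaluator

class MyTrainer(DefaultTrainer):
    @classmethod
    def build_evaluator(cls, cfg, dataset_name):
        return COCOEvaluator(dataset_name, cfg, True, output_dir=cfg.OUTPUT_DIR)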
+ Overwrite it if you'd like a different model. + """ + model = build_model(cfg) + logger = logging.getLogger(__name__) + logger.info("Model:\n{}".format(model)) + return model + + @classmethod + def build_optimizer(cls, cfg, model): + """ + Returns: + torch.optim.Optimizer: + + It now calls :func:`detectron2.solver.build_optimizer`. + Overwrite it if you'd like a different optimizer. + """ + return build_optimizer(cfg, model) + + @classmethod + def build_lr_scheduler(cls, cfg, optimizer): + """ + It now calls :func:`detectron2.solver.build_lr_scheduler`. + Overwrite it if you'd like a different scheduler. + """ + return build_lr_scheduler(cfg, optimizer) + + @classmethod + def build_train_loader(cls, cfg): + """ + Returns: + iterable + + It now calls :func:`detectron2.data.build_detection_train_loader`. + Overwrite it if you'd like a different data loader. + """ + return build_detection_train_loader(cfg) + + @classmethod + def build_test_loader(cls, cfg, dataset_name): + """ + Returns: + iterable + + It now calls :func:`detectron2.data.build_detection_test_loader`. + Overwrite it if you'd like a different data loader. + """ + return build_detection_test_loader(cfg, dataset_name) + + @classmethod + def build_evaluator(cls, cfg, dataset_name): + """ + Returns: + DatasetEvaluator or None + + It is not implemented by default. + """ + raise NotImplementedError( + """ +If you want DefaultTrainer to automatically run evaluation, +please implement `build_evaluator()` in subclasses (see train_net.py for example). +Alternatively, you can call evaluation functions yourself (see Colab balloon tutorial for example). +""" + ) + + @classmethod + def test(cls, cfg, model, evaluators=None): + """ + Args: + cfg (CfgNode): + model (nn.Module): + evaluators (list[DatasetEvaluator] or None): if None, will call + :meth:`build_evaluator`. Otherwise, must have the same length as + `cfg.DATASETS.TEST`. + + Returns: + dict: a dict of result metrics + """ + logger = logging.getLogger(__name__) + if isinstance(evaluators, DatasetEvaluator): + evaluators = [evaluators] + if evaluators is not None: + assert len(cfg.DATASETS.TEST) == len(evaluators), "{} != {}".format( + len(cfg.DATASETS.TEST), len(evaluators) + ) + + results = OrderedDict() + for idx, dataset_name in enumerate(cfg.DATASETS.TEST): + data_loader = cls.build_test_loader(cfg, dataset_name) + # When evaluators are passed in as arguments, + # implicitly assume that evaluators can be created before data_loader. + if evaluators is not None: + evaluator = evaluators[idx] + else: + try: + evaluator = cls.build_evaluator(cfg, dataset_name) + except NotImplementedError: + logger.warn( + "No evaluator found. Use `DefaultTrainer.test(evaluators=)`, " + "or implement its `build_evaluator` method." + ) + results[dataset_name] = {} + continue + results_i = inference_on_dataset(model, data_loader, evaluator) + results[dataset_name] = results_i + if comm.is_main_process(): + assert isinstance( + results_i, dict + ), "Evaluator must return a dict on the main process. 
Got {} instead.".format( + results_i + ) + logger.info("Evaluation results for {} in csv format:".format(dataset_name)) + print_csv_format(results_i) + + if len(results) == 1: + results = list(results.values())[0] + return results diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/hooks.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..e5085b4561302d2328ab505568dec4e9fc5ee0ad --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/hooks.py @@ -0,0 +1,427 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import datetime +import itertools +import logging +import os +import tempfile +import time +from collections import Counter +import torch +from fvcore.common.checkpoint import PeriodicCheckpointer as _PeriodicCheckpointer +from fvcore.common.file_io import PathManager +from fvcore.common.timer import Timer +from fvcore.nn.precise_bn import get_bn_modules, update_bn_stats + +import detectron2.utils.comm as comm +from detectron2.evaluation.testing import flatten_results_dict +from detectron2.utils.events import EventStorage, EventWriter + +from .train_loop import HookBase + +__all__ = [ + "CallbackHook", + "IterationTimer", + "PeriodicWriter", + "PeriodicCheckpointer", + "LRScheduler", + "AutogradProfiler", + "EvalHook", + "PreciseBN", +] + + +""" +Implement some common hooks. +""" + + +class CallbackHook(HookBase): + """ + Create a hook using callback functions provided by the user. + """ + + def __init__(self, *, before_train=None, after_train=None, before_step=None, after_step=None): + """ + Each argument is a function that takes one argument: the trainer. + """ + self._before_train = before_train + self._before_step = before_step + self._after_step = after_step + self._after_train = after_train + + def before_train(self): + if self._before_train: + self._before_train(self.trainer) + + def after_train(self): + if self._after_train: + self._after_train(self.trainer) + # The functions may be closures that hold reference to the trainer + # Therefore, delete them to avoid circular reference. + del self._before_train, self._after_train + del self._before_step, self._after_step + + def before_step(self): + if self._before_step: + self._before_step(self.trainer) + + def after_step(self): + if self._after_step: + self._after_step(self.trainer) + + +class IterationTimer(HookBase): + """ + Track the time spent for each iteration (each run_step call in the trainer). + Print a summary in the end of training. + + This hook uses the time between the call to its :meth:`before_step` + and :meth:`after_step` methods. + Under the convention that :meth:`before_step` of all hooks should only + take negligible amount of time, the :class:`IterationTimer` hook should be + placed at the beginning of the list of hooks to obtain accurate timing. + """ + + def __init__(self, warmup_iter=3): + """ + Args: + warmup_iter (int): the number of iterations at the beginning to exclude + from timing. 
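# A minimal usage sketch (not part of the original patch) for CallbackHook
# above; the print period is arbitrary.
log_hook = CallbackHook(
    after_step=lambda trainer: print("iter", trainer.iter)
    if (trainer.iter + 1) % 100 == 0
    else None
)
# trainer.register_hooks([log_hook])   # registered like any other hook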
+ """ + self._warmup_iter = warmup_iter + self._step_timer = Timer() + self._start_time = time.perf_counter() + self._total_timer = Timer() + + def before_train(self): + self._start_time = time.perf_counter() + self._total_timer.reset() + self._total_timer.pause() + + def after_train(self): + logger = logging.getLogger(__name__) + total_time = time.perf_counter() - self._start_time + total_time_minus_hooks = self._total_timer.seconds() + hook_time = total_time - total_time_minus_hooks + + num_iter = self.trainer.iter + 1 - self.trainer.start_iter - self._warmup_iter + + if num_iter > 0 and total_time_minus_hooks > 0: + # Speed is meaningful only after warmup + # NOTE this format is parsed by grep in some scripts + logger.info( + "Overall training speed: {} iterations in {} ({:.4f} s / it)".format( + num_iter, + str(datetime.timedelta(seconds=int(total_time_minus_hooks))), + total_time_minus_hooks / num_iter, + ) + ) + + logger.info( + "Total training time: {} ({} on hooks)".format( + str(datetime.timedelta(seconds=int(total_time))), + str(datetime.timedelta(seconds=int(hook_time))), + ) + ) + + def before_step(self): + self._step_timer.reset() + self._total_timer.resume() + + def after_step(self): + # +1 because we're in after_step + iter_done = self.trainer.iter - self.trainer.start_iter + 1 + if iter_done >= self._warmup_iter: + sec = self._step_timer.seconds() + self.trainer.storage.put_scalars(time=sec) + else: + self._start_time = time.perf_counter() + self._total_timer.reset() + + self._total_timer.pause() + + +class PeriodicWriter(HookBase): + """ + Write events to EventStorage periodically. + + It is executed every ``period`` iterations and after the last iteration. + """ + + def __init__(self, writers, period=20): + """ + Args: + writers (list[EventWriter]): a list of EventWriter objects + period (int): + """ + self._writers = writers + for w in writers: + assert isinstance(w, EventWriter), w + self._period = period + + def after_step(self): + if (self.trainer.iter + 1) % self._period == 0 or ( + self.trainer.iter == self.trainer.max_iter - 1 + ): + for writer in self._writers: + writer.write() + + def after_train(self): + for writer in self._writers: + writer.close() + + +class PeriodicCheckpointer(_PeriodicCheckpointer, HookBase): + """ + Same as :class:`detectron2.checkpoint.PeriodicCheckpointer`, but as a hook. + + Note that when used as a hook, + it is unable to save additional data other than what's defined + by the given `checkpointer`. + + It is executed every ``period`` iterations and after the last iteration. + """ + + def before_train(self): + self.max_iter = self.trainer.max_iter + + def after_step(self): + # No way to use **kwargs + self.step(self.trainer.iter) + + +class LRScheduler(HookBase): + """ + A hook which executes a torch builtin LR scheduler and summarizes the LR. + It is executed after every iteration. 
+ """ + + def __init__(self, optimizer, scheduler): + """ + Args: + optimizer (torch.optim.Optimizer): + scheduler (torch.optim._LRScheduler) + """ + self._optimizer = optimizer + self._scheduler = scheduler + + # NOTE: some heuristics on what LR to summarize + # summarize the param group with most parameters + largest_group = max(len(g["params"]) for g in optimizer.param_groups) + + if largest_group == 1: + # If all groups have one parameter, + # then find the most common initial LR, and use it for summary + lr_count = Counter([g["lr"] for g in optimizer.param_groups]) + lr = lr_count.most_common()[0][0] + for i, g in enumerate(optimizer.param_groups): + if g["lr"] == lr: + self._best_param_group_id = i + break + else: + for i, g in enumerate(optimizer.param_groups): + if len(g["params"]) == largest_group: + self._best_param_group_id = i + break + + def after_step(self): + lr = self._optimizer.param_groups[self._best_param_group_id]["lr"] + self.trainer.storage.put_scalar("lr", lr, smoothing_hint=False) + self._scheduler.step() + + +class AutogradProfiler(HookBase): + """ + A hook which runs `torch.autograd.profiler.profile`. + + Examples: + + .. code-block:: python + + hooks.AutogradProfiler( + lambda trainer: trainer.iter > 10 and trainer.iter < 20, self.cfg.OUTPUT_DIR + ) + + The above example will run the profiler for iteration 10~20 and dump + results to ``OUTPUT_DIR``. We did not profile the first few iterations + because they are typically slower than the rest. + The result files can be loaded in the ``chrome://tracing`` page in chrome browser. + + Note: + When used together with NCCL on older version of GPUs, + autograd profiler may cause deadlock because it unnecessarily allocates + memory on every device it sees. The memory management calls, if + interleaved with NCCL calls, lead to deadlock on GPUs that do not + support `cudaLaunchCooperativeKernelMultiDevice`. + """ + + def __init__(self, enable_predicate, output_dir, *, use_cuda=True): + """ + Args: + enable_predicate (callable[trainer -> bool]): a function which takes a trainer, + and returns whether to enable the profiler. + It will be called once every step, and can be used to select which steps to profile. + output_dir (str): the output directory to dump tracing files. + use_cuda (bool): same as in `torch.autograd.profiler.profile`. + """ + self._enable_predicate = enable_predicate + self._use_cuda = use_cuda + self._output_dir = output_dir + + def before_step(self): + if self._enable_predicate(self.trainer): + self._profiler = torch.autograd.profiler.profile(use_cuda=self._use_cuda) + self._profiler.__enter__() + else: + self._profiler = None + + def after_step(self): + if self._profiler is None: + return + self._profiler.__exit__(None, None, None) + PathManager.mkdirs(self._output_dir) + out_file = os.path.join( + self._output_dir, "profiler-trace-iter{}.json".format(self.trainer.iter) + ) + if "://" not in out_file: + self._profiler.export_chrome_trace(out_file) + else: + # Support non-posix filesystems + with tempfile.TemporaryDirectory(prefix="detectron2_profiler") as d: + tmp_file = os.path.join(d, "tmp.json") + self._profiler.export_chrome_trace(tmp_file) + with open(tmp_file) as f: + content = f.read() + with PathManager.open(out_file, "w") as f: + f.write(content) + + +class EvalHook(HookBase): + """ + Run an evaluation function periodically, and at the end of training. + + It is executed every ``eval_period`` iterations and after the last iteration. 
+ """ + + def __init__(self, eval_period, eval_function): + """ + Args: + eval_period (int): the period to run `eval_function`. + eval_function (callable): a function which takes no arguments, and + returns a nested dict of evaluation metrics. + + Note: + This hook must be enabled in all or none workers. + If you would like only certain workers to perform evaluation, + give other workers a no-op function (`eval_function=lambda: None`). + """ + self._period = eval_period + self._func = eval_function + + def _do_eval(self): + results = self._func() + + if results: + assert isinstance( + results, dict + ), "Eval function must return a dict. Got {} instead.".format(results) + + flattened_results = flatten_results_dict(results) + for k, v in flattened_results.items(): + try: + v = float(v) + except Exception: + raise ValueError( + "[EvalHook] eval_function should return a nested dict of float. " + "Got '{}: {}' instead.".format(k, v) + ) + self.trainer.storage.put_scalars(**flattened_results, smoothing_hint=False) + + # Evaluation may take different time among workers. + # A barrier make them start the next iteration together. + comm.synchronize() + + def after_step(self): + next_iter = self.trainer.iter + 1 + is_final = next_iter == self.trainer.max_iter + if is_final or (self._period > 0 and next_iter % self._period == 0): + self._do_eval() + + def after_train(self): + # func is likely a closure that holds reference to the trainer + # therefore we clean it to avoid circular reference in the end + del self._func + + +class PreciseBN(HookBase): + """ + The standard implementation of BatchNorm uses EMA in inference, which is + sometimes suboptimal. + This class computes the true average of statistics rather than the moving average, + and put true averages to every BN layer in the given model. + + It is executed every ``period`` iterations and after the last iteration. + """ + + def __init__(self, period, model, data_loader, num_iter): + """ + Args: + period (int): the period this hook is run, or 0 to not run during training. + The hook will always run in the end of training. + model (nn.Module): a module whose all BN layers in training mode will be + updated by precise BN. + Note that user is responsible for ensuring the BN layers to be + updated are in training mode when this hook is triggered. + data_loader (iterable): it will produce data to be run by `model(data)`. + num_iter (int): number of iterations used to compute the precise + statistics. + """ + self._logger = logging.getLogger(__name__) + if len(get_bn_modules(model)) == 0: + self._logger.info( + "PreciseBN is disabled because model does not contain BN layers in training mode." + ) + self._disabled = True + return + + self._model = model + self._data_loader = data_loader + self._num_iter = num_iter + self._period = period + self._disabled = False + + self._data_iter = None + + def after_step(self): + next_iter = self.trainer.iter + 1 + is_final = next_iter == self.trainer.max_iter + if is_final or (self._period > 0 and next_iter % self._period == 0): + self.update_stats() + + def update_stats(self): + """ + Update the model with precise statistics. Users can manually call this method. + """ + if self._disabled: + return + + if self._data_iter is None: + self._data_iter = iter(self._data_loader) + + def data_loader(): + for num_iter in itertools.count(1): + if num_iter % 100 == 0: + self._logger.info( + "Running precise-BN ... 
{}/{} iterations.".format(num_iter, self._num_iter) + ) + # This way we can reuse the same iterator + yield next(self._data_iter) + + with EventStorage(): # capture events in a new storage to discard them + self._logger.info( + "Running precise-BN for {} iterations... ".format(self._num_iter) + + "Note that this could produce different statistics every time." + ) + update_bn_stats(self._model, data_loader(), self._num_iter) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/launch.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/launch.py new file mode 100644 index 0000000000000000000000000000000000000000..9efbb0395d2c788d8cfe2cbbf66cde6ddc053585 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/launch.py @@ -0,0 +1,89 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import torch +import torch.distributed as dist +import torch.multiprocessing as mp + +from detectron2.utils import comm + +__all__ = ["launch"] + + +def _find_free_port(): + import socket + + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + # Binding to port 0 will cause the OS to find an available port for us + sock.bind(("", 0)) + port = sock.getsockname()[1] + sock.close() + # NOTE: there is still a chance the port could be taken by other processes. + return port + + +def launch(main_func, num_gpus_per_machine, num_machines=1, machine_rank=0, dist_url=None, args=()): + """ + Args: + main_func: a function that will be called by `main_func(*args)` + num_machines (int): the total number of machines + machine_rank (int): the rank of this machine (one per machine) + dist_url (str): url to connect to for distributed jobs, including protocol + e.g. "tcp://127.0.0.1:8686". + Can be set to "auto" to automatically select a free port on localhost + args (tuple): arguments passed to main_func + """ + world_size = num_machines * num_gpus_per_machine + if world_size > 1: + # https://github.com/pytorch/pytorch/pull/14391 + # TODO prctl in spawned processes + + if dist_url == "auto": + assert num_machines == 1, "dist_url=auto not supported in multi-machine jobs." + port = _find_free_port() + dist_url = f"tcp://127.0.0.1:{port}" + if num_machines > 1 and dist_url.startswith("file://"): + logger = logging.getLogger(__name__) + logger.warning( + "file:// is not a reliable init_method in multi-machine jobs. Prefer tcp://" + ) + + mp.spawn( + _distributed_worker, + nprocs=num_gpus_per_machine, + args=(main_func, world_size, num_gpus_per_machine, machine_rank, dist_url, args), + daemon=False, + ) + else: + main_func(*args) + + +def _distributed_worker( + local_rank, main_func, world_size, num_gpus_per_machine, machine_rank, dist_url, args +): + assert torch.cuda.is_available(), "cuda is not available. Please check your installation." 
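# A minimal launcher sketch (not part of the original patch), following the
# usual detectron2 tools/train_net.py pattern; main() is a hypothetical entry
# point that would build the cfg and trainer.
from detectron2.engine import default_argument_parser, launch

def main(args):
    pass   # build the cfg and trainer here

if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    launch(main, args.num_gpus, num_machines=args.num_machines,
           machine_rank=args.machine_rank, dist_url=args.dist_url, args=(args,))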
+ global_rank = machine_rank * num_gpus_per_machine + local_rank + try: + dist.init_process_group( + backend="NCCL", init_method=dist_url, world_size=world_size, rank=global_rank + ) + except Exception as e: + logger = logging.getLogger(__name__) + logger.error("Process group URL: {}".format(dist_url)) + raise e + # synchronize is needed here to prevent a possible timeout after calling init_process_group + # See: https://github.com/facebookresearch/maskrcnn-benchmark/issues/172 + comm.synchronize() + + assert num_gpus_per_machine <= torch.cuda.device_count() + torch.cuda.set_device(local_rank) + + # Setup the local process group (which contains ranks within the same machine) + assert comm._LOCAL_PROCESS_GROUP is None + num_machines = world_size // num_gpus_per_machine + for i in range(num_machines): + ranks_on_i = list(range(i * num_gpus_per_machine, (i + 1) * num_gpus_per_machine)) + pg = dist.new_group(ranks_on_i) + if i == machine_rank: + comm._LOCAL_PROCESS_GROUP = pg + + main_func(*args) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/train_loop.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/train_loop.py new file mode 100644 index 0000000000000000000000000000000000000000..453c9acfde2d65a182fbf18a6bce4b4583df5ca5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/engine/train_loop.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import logging +import numpy as np +import time +import weakref +import torch + +import detectron2.utils.comm as comm +from detectron2.utils.events import EventStorage + +__all__ = ["HookBase", "TrainerBase", "SimpleTrainer"] + + +class HookBase: + """ + Base class for hooks that can be registered with :class:`TrainerBase`. + + Each hook can implement 4 methods. The way they are called is demonstrated + in the following snippet: + + .. code-block:: python + + hook.before_train() + for iter in range(start_iter, max_iter): + hook.before_step() + trainer.run_step() + hook.after_step() + hook.after_train() + + Notes: + 1. In the hook method, users can access `self.trainer` to access more + properties about the context (e.g., current iteration). + + 2. A hook that does something in :meth:`before_step` can often be + implemented equivalently in :meth:`after_step`. + If the hook takes non-trivial time, it is strongly recommended to + implement the hook in :meth:`after_step` instead of :meth:`before_step`. + The convention is that :meth:`before_step` should only take negligible time. + + Following this convention will allow hooks that do care about the difference + between :meth:`before_step` and :meth:`after_step` (e.g., timer) to + function properly. + + Attributes: + trainer: A weak reference to the trainer object. Set by the trainer when the hook is + registered. + """ + + def before_train(self): + """ + Called before the first iteration. + """ + pass + + def after_train(self): + """ + Called after the last iteration. + """ + pass + + def before_step(self): + """ + Called before each iteration. + """ + pass + + def after_step(self): + """ + Called after each iteration. + """ + pass + + +class TrainerBase: + """ + Base class for iterative trainer with hooks. + + The only assumption we made here is: the training runs in a loop. + A subclass can implement what the loop is. + We made no assumptions about the existence of dataloader, optimizer, model, etc. 
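+    Subclasses only need to implement :meth:`run_step`; see :class:`SimpleTrainer`
+    below for a concrete example. A minimal usage sketch (the model, optimizer,
+    data loader and hook list are placeholders):
+
+    .. code-block:: python
+
+        trainer = SimpleTrainer(model, data_loader, optimizer)
+        trainer.register_hooks(list_of_hooks)  # e.g. EvalHook / PreciseBN instances
+        trainer.train(start_iter=0, max_iter=90000)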
+ + Attributes: + iter(int): the current iteration. + + start_iter(int): The iteration to start with. + By convention the minimum possible value is 0. + + max_iter(int): The iteration to end training. + + storage(EventStorage): An EventStorage that's opened during the course of training. + """ + + def __init__(self): + self._hooks = [] + + def register_hooks(self, hooks): + """ + Register hooks to the trainer. The hooks are executed in the order + they are registered. + + Args: + hooks (list[Optional[HookBase]]): list of hooks + """ + hooks = [h for h in hooks if h is not None] + for h in hooks: + assert isinstance(h, HookBase) + # To avoid circular reference, hooks and trainer cannot own each other. + # This normally does not matter, but will cause memory leak if the + # involved objects contain __del__: + # See http://engineering.hearsaysocial.com/2013/06/16/circular-references-in-python/ + h.trainer = weakref.proxy(self) + self._hooks.extend(hooks) + + def train(self, start_iter: int, max_iter: int): + """ + Args: + start_iter, max_iter (int): See docs above + """ + logger = logging.getLogger(__name__) + logger.info("Starting training from iteration {}".format(start_iter)) + + self.iter = self.start_iter = start_iter + self.max_iter = max_iter + + with EventStorage(start_iter) as self.storage: + try: + self.before_train() + for self.iter in range(start_iter, max_iter): + self.before_step() + self.run_step() + self.after_step() + except Exception: + logger.exception("Exception during training:") + raise + finally: + self.after_train() + + def before_train(self): + for h in self._hooks: + h.before_train() + + def after_train(self): + for h in self._hooks: + h.after_train() + + def before_step(self): + for h in self._hooks: + h.before_step() + + def after_step(self): + for h in self._hooks: + h.after_step() + # this guarantees, that in each hook's after_step, storage.iter == trainer.iter + self.storage.step() + + def run_step(self): + raise NotImplementedError + + +class SimpleTrainer(TrainerBase): + """ + A simple trainer for the most common type of task: + single-cost single-optimizer single-data-source iterative optimization. + It assumes that every step, you: + + 1. Compute the loss with a data from the data_loader. + 2. Compute the gradients with the above loss. + 3. Update the model with the optimizer. + + If you want to do anything fancier than this, + either subclass TrainerBase and implement your own `run_step`, + or write your own training loop. + """ + + def __init__(self, model, data_loader, optimizer): + """ + Args: + model: a torch Module. Takes a data from data_loader and returns a + dict of losses. + data_loader: an iterable. Contains data to be used to call model. + optimizer: a torch optimizer. + """ + super().__init__() + + """ + We set the model to training mode in the trainer. + However it's valid to train a model that's in eval mode. + If you want your model (or a submodule of it) to behave + like evaluation during training, you can overwrite its train() method. + """ + model.train() + + self.model = model + self.data_loader = data_loader + self._data_loader_iter = iter(data_loader) + self.optimizer = optimizer + + def run_step(self): + """ + Implement the standard training logic described above. + """ + assert self.model.training, "[SimpleTrainer] model was changed to eval mode!" + start = time.perf_counter() + """ + If you want to do something with the data, you can wrap the dataloader. 
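+        For example, a wrapper around the dataloader can apply extra preprocessing
+        to each batch before it reaches the model.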
+ """ + data = next(self._data_loader_iter) + data_time = time.perf_counter() - start + + """ + If you want to do something with the losses, you can wrap the model. + """ + loss_dict = self.model(data) + losses = sum(loss_dict.values()) + self._detect_anomaly(losses, loss_dict) + + metrics_dict = loss_dict + metrics_dict["data_time"] = data_time + self._write_metrics(metrics_dict) + + """ + If you need to accumulate gradients or something similar, you can + wrap the optimizer with your custom `zero_grad()` method. + """ + self.optimizer.zero_grad() + losses.backward() + + """ + If you need gradient clipping/scaling or other processing, you can + wrap the optimizer with your custom `step()` method. + """ + self.optimizer.step() + + def _detect_anomaly(self, losses, loss_dict): + if not torch.isfinite(losses).all(): + raise FloatingPointError( + "Loss became infinite or NaN at iteration={}!\nloss_dict = {}".format( + self.iter, loss_dict + ) + ) + + def _write_metrics(self, metrics_dict: dict): + """ + Args: + metrics_dict (dict): dict of scalar metrics + """ + metrics_dict = { + k: v.detach().cpu().item() if isinstance(v, torch.Tensor) else float(v) + for k, v in metrics_dict.items() + } + # gather metrics among all workers for logging + # This assumes we do DDP-style training, which is currently the only + # supported method in detectron2. + all_metrics_dict = comm.gather(metrics_dict) + + if comm.is_main_process(): + if "data_time" in all_metrics_dict[0]: + # data_time among workers can have high variance. The actual latency + # caused by data_time is the maximum among workers. + data_time = np.max([x.pop("data_time") for x in all_metrics_dict]) + self.storage.put_scalar("data_time", data_time) + + # average the rest metrics + metrics_dict = { + k: np.mean([x[k] for x in all_metrics_dict]) for k in all_metrics_dict[0].keys() + } + total_losses_reduced = sum(loss for loss in metrics_dict.values()) + + self.storage.put_scalar("total_loss", total_losses_reduced) + if len(metrics_dict) > 1: + self.storage.put_scalars(**metrics_dict) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f1d2f1001af2eb46060db362a94d9dae26e3fb4e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from .cityscapes_evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator +from .coco_evaluation import COCOEvaluator +from .rotated_coco_evaluation import RotatedCOCOEvaluator +from .evaluator import DatasetEvaluator, DatasetEvaluators, inference_context, inference_on_dataset +from .lvis_evaluation import LVISEvaluator +from .panoptic_evaluation import COCOPanopticEvaluator +from .pascal_voc_evaluation import PascalVOCDetectionEvaluator +from .sem_seg_evaluation import SemSegEvaluator +from .testing import print_csv_format, verify_results + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/cityscapes_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/cityscapes_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..f6287a8980b10d9d13f0f0e6a0f0e1a16ff3566c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/cityscapes_evaluation.py @@ -0,0 +1,187 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import glob +import logging +import numpy as np +import os +import tempfile +from collections import OrderedDict +import torch +from fvcore.common.file_io import PathManager +from PIL import Image + +from detectron2.data import MetadataCatalog +from detectron2.utils import comm + +from .evaluator import DatasetEvaluator + + +class CityscapesEvaluator(DatasetEvaluator): + """ + Base class for evaluation using cityscapes API. + """ + + def __init__(self, dataset_name): + """ + Args: + dataset_name (str): the name of the dataset. + It must have the following metadata associated with it: + "thing_classes", "gt_dir". + """ + self._metadata = MetadataCatalog.get(dataset_name) + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + def reset(self): + self._working_dir = tempfile.TemporaryDirectory(prefix="cityscapes_eval_") + self._temp_dir = self._working_dir.name + # All workers will write to the same results directory + # TODO this does not work in distributed training + self._temp_dir = comm.all_gather(self._temp_dir)[0] + if self._temp_dir != self._working_dir.name: + self._working_dir.cleanup() + self._logger.info( + "Writing cityscapes results to temporary directory {} ...".format(self._temp_dir) + ) + + +class CityscapesInstanceEvaluator(CityscapesEvaluator): + """ + Evaluate instance segmentation results using cityscapes API. + + Note: + * It does not work in multi-machine distributed training. + * It contains a synchronization, therefore has to be used on all ranks. + * Only the main process runs evaluation. 
+ """ + + def process(self, inputs, outputs): + from cityscapesscripts.helpers.labels import name2label + + for input, output in zip(inputs, outputs): + file_name = input["file_name"] + basename = os.path.splitext(os.path.basename(file_name))[0] + pred_txt = os.path.join(self._temp_dir, basename + "_pred.txt") + + output = output["instances"].to(self._cpu_device) + num_instances = len(output) + with open(pred_txt, "w") as fout: + for i in range(num_instances): + pred_class = output.pred_classes[i] + classes = self._metadata.thing_classes[pred_class] + class_id = name2label[classes].id + score = output.scores[i] + mask = output.pred_masks[i].numpy().astype("uint8") + png_filename = os.path.join( + self._temp_dir, basename + "_{}_{}.png".format(i, classes) + ) + + Image.fromarray(mask * 255).save(png_filename) + fout.write("{} {} {}\n".format(os.path.basename(png_filename), class_id, score)) + + def evaluate(self): + """ + Returns: + dict: has a key "segm", whose value is a dict of "AP" and "AP50". + """ + comm.synchronize() + if comm.get_rank() > 0: + return + import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as cityscapes_eval + + self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) + + # set some global states in cityscapes evaluation API, before evaluating + cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) + cityscapes_eval.args.predictionWalk = None + cityscapes_eval.args.JSONOutput = False + cityscapes_eval.args.colorized = False + cityscapes_eval.args.gtInstancesFile = os.path.join(self._temp_dir, "gtInstances.json") + + # These lines are adopted from + # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa + gt_dir = PathManager.get_local_path(self._metadata.gt_dir) + groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_instanceIds.png")) + assert len( + groundTruthImgList + ), "Cannot find any ground truth images to use for evaluation. Searched for: {}".format( + cityscapes_eval.args.groundTruthSearch + ) + predictionImgList = [] + for gt in groundTruthImgList: + predictionImgList.append(cityscapes_eval.getPrediction(gt, cityscapes_eval.args)) + results = cityscapes_eval.evaluateImgLists( + predictionImgList, groundTruthImgList, cityscapes_eval.args + )["averages"] + + ret = OrderedDict() + ret["segm"] = {"AP": results["allAp"] * 100, "AP50": results["allAp50%"] * 100} + self._working_dir.cleanup() + return ret + + +class CityscapesSemSegEvaluator(CityscapesEvaluator): + """ + Evaluate semantic segmentation results using cityscapes API. + + Note: + * It does not work in multi-machine distributed training. + * It contains a synchronization, therefore has to be used on all ranks. + * Only the main process runs evaluation. 
+ """ + + def process(self, inputs, outputs): + from cityscapesscripts.helpers.labels import trainId2label + + for input, output in zip(inputs, outputs): + file_name = input["file_name"] + basename = os.path.splitext(os.path.basename(file_name))[0] + pred_filename = os.path.join(self._temp_dir, basename + "_pred.png") + + output = output["sem_seg"].argmax(dim=0).to(self._cpu_device).numpy() + pred = 255 * np.ones(output.shape, dtype=np.uint8) + for train_id, label in trainId2label.items(): + if label.ignoreInEval: + continue + pred[output == train_id] = label.id + Image.fromarray(pred).save(pred_filename) + + def evaluate(self): + comm.synchronize() + if comm.get_rank() > 0: + return + # Load the Cityscapes eval script *after* setting the required env var, + # since the script reads CITYSCAPES_DATASET into global variables at load time. + import cityscapesscripts.evaluation.evalPixelLevelSemanticLabeling as cityscapes_eval + + self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) + + # set some global states in cityscapes evaluation API, before evaluating + cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) + cityscapes_eval.args.predictionWalk = None + cityscapes_eval.args.JSONOutput = False + cityscapes_eval.args.colorized = False + + # These lines are adopted from + # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalPixelLevelSemanticLabeling.py # noqa + gt_dir = PathManager.get_local_path(self._metadata.gt_dir) + groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_labelIds.png")) + assert len( + groundTruthImgList + ), "Cannot find any ground truth images to use for evaluation. Searched for: {}".format( + cityscapes_eval.args.groundTruthSearch + ) + predictionImgList = [] + for gt in groundTruthImgList: + predictionImgList.append(cityscapes_eval.getPrediction(cityscapes_eval.args, gt)) + results = cityscapes_eval.evaluateImgLists( + predictionImgList, groundTruthImgList, cityscapes_eval.args + ) + ret = OrderedDict() + ret["sem_seg"] = { + "IoU": 100.0 * results["averageScoreClasses"], + "iIoU": 100.0 * results["averageScoreInstClasses"], + "IoU_sup": 100.0 * results["averageScoreCategories"], + "iIoU_sup": 100.0 * results["averageScoreInstCategories"], + } + self._working_dir.cleanup() + return ret diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/coco_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/coco_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..64b0903a43187db785113267ed16e82be6f5b28c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/coco_evaluation.py @@ -0,0 +1,512 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import contextlib +import copy +import io +import itertools +import json +import logging +import numpy as np +import os +import pickle +from collections import OrderedDict +import pycocotools.mask as mask_util +import torch +from fvcore.common.file_io import PathManager +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from tabulate import tabulate + +import detectron2.utils.comm as comm +from detectron2.data import MetadataCatalog +from detectron2.data.datasets.coco import convert_to_coco_json +from detectron2.structures import Boxes, BoxMode, pairwise_iou +from detectron2.utils.logger import create_small_table + +from .evaluator import DatasetEvaluator + + +class COCOEvaluator(DatasetEvaluator): + """ + Evaluate object proposal, instance detection/segmentation, keypoint detection + outputs using COCO's metrics and APIs. + """ + + def __init__(self, dataset_name, cfg, distributed, output_dir=None): + """ + Args: + dataset_name (str): name of the dataset to be evaluated. + It must have either the following corresponding metadata: + + "json_file": the path to the COCO format annotation + + Or it must be in detectron2's standard dataset format + so it can be converted to COCO format automatically. + cfg (CfgNode): config instance + distributed (True): if True, will collect results from all ranks and run evaluation + in the main process. + Otherwise, will evaluate the results in the current process. + output_dir (str): optional, an output directory to dump all + results predicted on the dataset. The dump contains two files: + + 1. "instance_predictions.pth" a file in torch serialization + format that contains all the raw original predictions. + 2. "coco_instances_results.json" a json file in COCO's result + format. + """ + self._tasks = self._tasks_from_config(cfg) + self._distributed = distributed + self._output_dir = output_dir + + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + self._metadata = MetadataCatalog.get(dataset_name) + if not hasattr(self._metadata, "json_file"): + self._logger.warning( + f"json_file was not found in MetaDataCatalog for '{dataset_name}'." + " Trying to convert it to COCO format ..." + ) + + cache_path = os.path.join(output_dir, f"{dataset_name}_coco_format.json") + self._metadata.json_file = cache_path + convert_to_coco_json(dataset_name, cache_path) + + json_file = PathManager.get_local_path(self._metadata.json_file) + with contextlib.redirect_stdout(io.StringIO()): + self._coco_api = COCO(json_file) + + self._kpt_oks_sigmas = cfg.TEST.KEYPOINT_OKS_SIGMAS + # Test set json files do not contain annotations (evaluation must be + # performed using the COCO evaluation server). + self._do_evaluation = "annotations" in self._coco_api.split_name + + def reset(self): + self._predictions = [] + + def _tasks_from_config(self, cfg): + """ + Returns: + tuple[str]: tasks that can be evaluated under the given configuration. + """ + tasks = ("bbox",) + if cfg.MODEL.MASK_ON: + tasks = tasks + ("segm",) + if cfg.MODEL.KEYPOINT_ON: + tasks = tasks + ("keypoints",) + return tasks + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a COCO model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a COCO model. It is a list of dicts with key + "instances" that contains :class:`Instances`. 
+ """ + for input, output in zip(inputs, outputs): + prediction = {"image_id": input["image_id"]} + + # TODO this is ugly + if "instances" in output: + instances = output["instances"].to(self._cpu_device) + prediction["instances"] = instances_to_coco_json(instances, input["image_id"]) + if "proposals" in output: + prediction["proposals"] = output["proposals"].to(self._cpu_device) + self._predictions.append(prediction) + + def evaluate(self): + if self._distributed: + comm.synchronize() + predictions = comm.gather(self._predictions, dst=0) + predictions = list(itertools.chain(*predictions)) + + if not comm.is_main_process(): + return {} + else: + predictions = self._predictions + + if len(predictions) == 0: + self._logger.warning("[COCOEvaluator] Did not receive valid predictions.") + return {} + + if self._output_dir: + PathManager.mkdirs(self._output_dir) + file_path = os.path.join(self._output_dir, "instances_predictions.pth") + with PathManager.open(file_path, "wb") as f: + torch.save(predictions, f) + + self._results = OrderedDict() + if "proposals" in predictions[0]: + self._eval_box_proposals(predictions) + if "instances" in predictions[0]: + self._eval_predictions(set(self._tasks), predictions) + # Copy so the caller can do whatever with results + return copy.deepcopy(self._results) + + def _eval_predictions(self, tasks, predictions): + """ + Evaluate predictions on the given tasks. + Fill self._results with the metrics of the tasks. + """ + self._logger.info("Preparing results for COCO format ...") + coco_results = list(itertools.chain(*[x["instances"] for x in predictions])) + + # unmap the category ids for COCO + if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"): + reverse_id_mapping = { + v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items() + } + for result in coco_results: + category_id = result["category_id"] + assert ( + category_id in reverse_id_mapping + ), "A prediction has category_id={}, which is not available in the dataset.".format( + category_id + ) + result["category_id"] = reverse_id_mapping[category_id] + + if self._output_dir: + file_path = os.path.join(self._output_dir, "coco_instances_results.json") + self._logger.info("Saving results to {}".format(file_path)) + with PathManager.open(file_path, "w") as f: + f.write(json.dumps(coco_results)) + f.flush() + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating predictions ...") + for task in sorted(tasks): + coco_eval = ( + _evaluate_predictions_on_coco( + self._coco_api, coco_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas + ) + if len(coco_results) > 0 + else None # cocoapi does not handle empty results very well + ) + + res = self._derive_coco_results( + coco_eval, task, class_names=self._metadata.get("thing_classes") + ) + self._results[task] = res + + def _eval_box_proposals(self, predictions): + """ + Evaluate the box proposals in predictions. + Fill self._results with the metrics for "box_proposals" task. + """ + if self._output_dir: + # Saving generated box proposals to file. + # Predicted box_proposals are in XYXY_ABS mode. 
+ bbox_mode = BoxMode.XYXY_ABS.value + ids, boxes, objectness_logits = [], [], [] + for prediction in predictions: + ids.append(prediction["image_id"]) + boxes.append(prediction["proposals"].proposal_boxes.tensor.numpy()) + objectness_logits.append(prediction["proposals"].objectness_logits.numpy()) + + proposal_data = { + "boxes": boxes, + "objectness_logits": objectness_logits, + "ids": ids, + "bbox_mode": bbox_mode, + } + with PathManager.open(os.path.join(self._output_dir, "box_proposals.pkl"), "wb") as f: + pickle.dump(proposal_data, f) + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating bbox proposals ...") + res = {} + areas = {"all": "", "small": "s", "medium": "m", "large": "l"} + for limit in [100, 1000]: + for area, suffix in areas.items(): + stats = _evaluate_box_proposals(predictions, self._coco_api, area=area, limit=limit) + key = "AR{}@{:d}".format(suffix, limit) + res[key] = float(stats["ar"].item() * 100) + self._logger.info("Proposal metrics: \n" + create_small_table(res)) + self._results["box_proposals"] = res + + def _derive_coco_results(self, coco_eval, iou_type, class_names=None): + """ + Derive the desired score numbers from summarized COCOeval. + + Args: + coco_eval (None or COCOEval): None represents no predictions from model. + iou_type (str): + class_names (None or list[str]): if provided, will use it to predict + per-category AP. + + Returns: + a dict of {metric name: score} + """ + + metrics = { + "bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl"], + "segm": ["AP", "AP50", "AP75", "APs", "APm", "APl"], + "keypoints": ["AP", "AP50", "AP75", "APm", "APl"], + }[iou_type] + + if coco_eval is None: + self._logger.warn("No predictions from the model!") + return {metric: float("nan") for metric in metrics} + + # the standard metrics + results = { + metric: float(coco_eval.stats[idx] * 100 if coco_eval.stats[idx] >= 0 else "nan") + for idx, metric in enumerate(metrics) + } + self._logger.info( + "Evaluation results for {}: \n".format(iou_type) + create_small_table(results) + ) + if not np.isfinite(sum(results.values())): + self._logger.info("Note that some metrics cannot be computed.") + + if class_names is None or len(class_names) <= 1: + return results + # Compute per-category AP + # from https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L222-L252 # noqa + precisions = coco_eval.eval["precision"] + # precision has dims (iou, recall, cls, area range, max dets) + assert len(class_names) == precisions.shape[2] + + results_per_category = [] + for idx, name in enumerate(class_names): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + ap = np.mean(precision) if precision.size else float("nan") + results_per_category.append(("{}".format(name), float(ap * 100))) + + # tabulate it + N_COLS = min(6, len(results_per_category) * 2) + results_flatten = list(itertools.chain(*results_per_category)) + results_2d = itertools.zip_longest(*[results_flatten[i::N_COLS] for i in range(N_COLS)]) + table = tabulate( + results_2d, + tablefmt="pipe", + floatfmt=".3f", + headers=["category", "AP"] * (N_COLS // 2), + numalign="left", + ) + self._logger.info("Per-category {} AP: \n".format(iou_type) + table) + + results.update({"AP-" + name: ap for name, ap in results_per_category}) + return results + 
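+# Typical usage of COCOEvaluator (a sketch; the dataset name, config, model and
+# data loader below are placeholders, not defined in this file):
+#
+#   from detectron2.evaluation import inference_on_dataset
+#   evaluator = COCOEvaluator("coco_2017_val", cfg, distributed=True, output_dir="./output")
+#   metrics = inference_on_dataset(model, val_loader, evaluator)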
+ +def instances_to_coco_json(instances, img_id): + """ + Dump an "Instances" object to a COCO-format json that's used for evaluation. + + Args: + instances (Instances): + img_id (int): the image id + + Returns: + list[dict]: list of json annotations in COCO format. + """ + num_instance = len(instances) + if num_instance == 0: + return [] + + boxes = instances.pred_boxes.tensor.numpy() + boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + boxes = boxes.tolist() + scores = instances.scores.tolist() + classes = instances.pred_classes.tolist() + + has_mask = instances.has("pred_masks") + if has_mask: + # use RLE to encode the masks, because they are too large and takes memory + # since this evaluator stores outputs of the entire dataset + rles = [ + mask_util.encode(np.array(mask[:, :, None], order="F", dtype="uint8"))[0] + for mask in instances.pred_masks + ] + for rle in rles: + # "counts" is an array encoded by mask_util as a byte-stream. Python3's + # json writer which always produces strings cannot serialize a bytestream + # unless you decode it. Thankfully, utf-8 works out (which is also what + # the pycocotools/_mask.pyx does). + rle["counts"] = rle["counts"].decode("utf-8") + + has_keypoints = instances.has("pred_keypoints") + if has_keypoints: + keypoints = instances.pred_keypoints + + results = [] + for k in range(num_instance): + result = { + "image_id": img_id, + "category_id": classes[k], + "bbox": boxes[k], + "score": scores[k], + } + if has_mask: + result["segmentation"] = rles[k] + if has_keypoints: + # In COCO annotations, + # keypoints coordinates are pixel indices. + # However our predictions are floating point coordinates. + # Therefore we subtract 0.5 to be consistent with the annotation format. + # This is the inverse of data loading logic in `data/coco.py`. + keypoints[k][:, :2] -= 0.5 + result["keypoints"] = keypoints[k].flatten().tolist() + results.append(result) + return results + + +# inspired from Detectron: +# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa +def _evaluate_box_proposals(dataset_predictions, coco_api, thresholds=None, area="all", limit=None): + """ + Evaluate detection proposal recall metrics. This function is a much + faster alternative to the official COCO API recall evaluation code. However, + it produces slightly different results. 
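+    The returned dict contains the average recall ("ar"), the per-threshold
+    "recalls", the IoU "thresholds", the sorted "gt_overlaps" and the number of
+    ground-truth boxes considered ("num_pos").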
+ """ + # Record max overlap value for each gt box + # Return vector of overlap values + areas = { + "all": 0, + "small": 1, + "medium": 2, + "large": 3, + "96-128": 4, + "128-256": 5, + "256-512": 6, + "512-inf": 7, + } + area_ranges = [ + [0 ** 2, 1e5 ** 2], # all + [0 ** 2, 32 ** 2], # small + [32 ** 2, 96 ** 2], # medium + [96 ** 2, 1e5 ** 2], # large + [96 ** 2, 128 ** 2], # 96-128 + [128 ** 2, 256 ** 2], # 128-256 + [256 ** 2, 512 ** 2], # 256-512 + [512 ** 2, 1e5 ** 2], + ] # 512-inf + assert area in areas, "Unknown area range: {}".format(area) + area_range = area_ranges[areas[area]] + gt_overlaps = [] + num_pos = 0 + + for prediction_dict in dataset_predictions: + predictions = prediction_dict["proposals"] + + # sort predictions in descending order + # TODO maybe remove this and make it explicit in the documentation + inds = predictions.objectness_logits.sort(descending=True)[1] + predictions = predictions[inds] + + ann_ids = coco_api.getAnnIds(imgIds=prediction_dict["image_id"]) + anno = coco_api.loadAnns(ann_ids) + gt_boxes = [ + BoxMode.convert(obj["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) + for obj in anno + if obj["iscrowd"] == 0 + ] + gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4) # guard against no boxes + gt_boxes = Boxes(gt_boxes) + gt_areas = torch.as_tensor([obj["area"] for obj in anno if obj["iscrowd"] == 0]) + + if len(gt_boxes) == 0 or len(predictions) == 0: + continue + + valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1]) + gt_boxes = gt_boxes[valid_gt_inds] + + num_pos += len(gt_boxes) + + if len(gt_boxes) == 0: + continue + + if limit is not None and len(predictions) > limit: + predictions = predictions[:limit] + + overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes) + + _gt_overlaps = torch.zeros(len(gt_boxes)) + for j in range(min(len(predictions), len(gt_boxes))): + # find which proposal box maximally covers each gt box + # and get the iou amount of coverage for each gt box + max_overlaps, argmax_overlaps = overlaps.max(dim=0) + + # find which gt box is 'best' covered (i.e. 'best' = most iou) + gt_ovr, gt_ind = max_overlaps.max(dim=0) + assert gt_ovr >= 0 + # find the proposal box that covers the best covered gt box + box_ind = argmax_overlaps[gt_ind] + # record the iou coverage of this gt box + _gt_overlaps[j] = overlaps[box_ind, gt_ind] + assert _gt_overlaps[j] == gt_ovr + # mark the proposal box and the gt box as used + overlaps[box_ind, :] = -1 + overlaps[:, gt_ind] = -1 + + # append recorded iou coverage level + gt_overlaps.append(_gt_overlaps) + gt_overlaps = ( + torch.cat(gt_overlaps, dim=0) if len(gt_overlaps) else torch.zeros(0, dtype=torch.float32) + ) + gt_overlaps, _ = torch.sort(gt_overlaps) + + if thresholds is None: + step = 0.05 + thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32) + recalls = torch.zeros_like(thresholds) + # compute recall for each iou threshold + for i, t in enumerate(thresholds): + recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos) + # ar = 2 * np.trapz(recalls, thresholds) + ar = recalls.mean() + return { + "ar": ar, + "recalls": recalls, + "thresholds": thresholds, + "gt_overlaps": gt_overlaps, + "num_pos": num_pos, + } + + +def _evaluate_predictions_on_coco(coco_gt, coco_results, iou_type, kpt_oks_sigmas=None): + """ + Evaluate the coco results using COCOEval API. 
+ """ + assert len(coco_results) > 0 + + if iou_type == "segm": + coco_results = copy.deepcopy(coco_results) + # When evaluating mask AP, if the results contain bbox, cocoapi will + # use the box area as the area of the instance, instead of the mask area. + # This leads to a different definition of small/medium/large. + # We remove the bbox field to let mask AP use mask area. + for c in coco_results: + c.pop("bbox", None) + + coco_dt = coco_gt.loadRes(coco_results) + coco_eval = COCOeval(coco_gt, coco_dt, iou_type) + # Use the COCO default keypoint OKS sigmas unless overrides are specified + if kpt_oks_sigmas: + coco_eval.params.kpt_oks_sigmas = np.array(kpt_oks_sigmas) + + if iou_type == "keypoints": + num_keypoints = len(coco_results[0]["keypoints"]) // 3 + assert len(coco_eval.params.kpt_oks_sigmas) == num_keypoints, ( + "[COCOEvaluator] The length of cfg.TEST.KEYPOINT_OKS_SIGMAS (default: 17) " + "must be equal to the number of keypoints. However the prediction has {} " + "keypoints! For more information please refer to " + "http://cocodataset.org/#keypoints-eval.".format(num_keypoints) + ) + + coco_eval.evaluate() + coco_eval.accumulate() + coco_eval.summarize() + + return coco_eval diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/evaluator.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/evaluator.py new file mode 100644 index 0000000000000000000000000000000000000000..dcb98043a1ededb3925d0ecbba3914d6409dc022 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/evaluator.py @@ -0,0 +1,196 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import datetime +import logging +import time +from collections import OrderedDict +from contextlib import contextmanager +import torch + +from detectron2.utils.comm import get_world_size, is_main_process +from detectron2.utils.logger import log_every_n_seconds + + +class DatasetEvaluator: + """ + Base class for a dataset evaluator. + + The function :func:`inference_on_dataset` runs the model over + all samples in the dataset, and have a DatasetEvaluator to process the inputs/outputs. + + This class will accumulate information of the inputs/outputs (by :meth:`process`), + and produce evaluation results in the end (by :meth:`evaluate`). + """ + + def reset(self): + """ + Preparation for a new round of evaluation. + Should be called before starting a round of evaluation. + """ + pass + + def process(self, inputs, outputs): + """ + Process the pair of inputs and outputs. + If they contain batches, the pairs can be consumed one-by-one using `zip`: + + .. code-block:: python + + for input_, output in zip(inputs, outputs): + # do evaluation on single input/output pair + ... + + Args: + inputs (list): the inputs that's used to call the model. + outputs (list): the return value of `model(inputs)` + """ + pass + + def evaluate(self): + """ + Evaluate/summarize the performance, after processing all input/output pairs. + + Returns: + dict: + A new evaluator class can return a dict of arbitrary format + as long as the user can process the results. + In our train_net.py, we expect the following format: + + * key: the name of the task (e.g., bbox) + * value: a dict of {metric name: score}, e.g.: {"AP50": 80} + """ + pass + + +class DatasetEvaluators(DatasetEvaluator): + """ + Wrapper class to combine multiple :class:`DatasetEvaluator` instances. 
+ + This class dispatches every evaluation call to + all of its :class:`DatasetEvaluator`. + """ + + def __init__(self, evaluators): + """ + Args: + evaluators (list): the evaluators to combine. + """ + super().__init__() + self._evaluators = evaluators + + def reset(self): + for evaluator in self._evaluators: + evaluator.reset() + + def process(self, inputs, outputs): + for evaluator in self._evaluators: + evaluator.process(inputs, outputs) + + def evaluate(self): + results = OrderedDict() + for evaluator in self._evaluators: + result = evaluator.evaluate() + if is_main_process() and result is not None: + for k, v in result.items(): + assert ( + k not in results + ), "Different evaluators produce results with the same key {}".format(k) + results[k] = v + return results + + +def inference_on_dataset(model, data_loader, evaluator): + """ + Run model on the data_loader and evaluate the metrics with evaluator. + Also benchmark the inference speed of `model.forward` accurately. + The model will be used in eval mode. + + Args: + model (nn.Module): a module which accepts an object from + `data_loader` and returns some outputs. It will be temporarily set to `eval` mode. + + If you wish to evaluate a model in `training` mode instead, you can + wrap the given model and override its behavior of `.eval()` and `.train()`. + data_loader: an iterable object with a length. + The elements it generates will be the inputs to the model. + evaluator (DatasetEvaluator): the evaluator to run. Use `None` if you only want + to benchmark, but don't want to do any evaluation. + + Returns: + The return value of `evaluator.evaluate()` + """ + num_devices = get_world_size() + logger = logging.getLogger(__name__) + logger.info("Start inference on {} images".format(len(data_loader))) + + total = len(data_loader) # inference data loader must have a fixed length + if evaluator is None: + # create a no-op evaluator + evaluator = DatasetEvaluators([]) + evaluator.reset() + + num_warmup = min(5, total - 1) + start_time = time.perf_counter() + total_compute_time = 0 + with inference_context(model), torch.no_grad(): + for idx, inputs in enumerate(data_loader): + if idx == num_warmup: + start_time = time.perf_counter() + total_compute_time = 0 + + start_compute_time = time.perf_counter() + outputs = model(inputs) + if torch.cuda.is_available(): + torch.cuda.synchronize() + total_compute_time += time.perf_counter() - start_compute_time + evaluator.process(inputs, outputs) + + iters_after_start = idx + 1 - num_warmup * int(idx >= num_warmup) + seconds_per_img = total_compute_time / iters_after_start + if idx >= num_warmup * 2 or seconds_per_img > 5: + total_seconds_per_img = (time.perf_counter() - start_time) / iters_after_start + eta = datetime.timedelta(seconds=int(total_seconds_per_img * (total - idx - 1))) + log_every_n_seconds( + logging.INFO, + "Inference done {}/{}. {:.4f} s / demo. 
ETA={}".format( + idx + 1, total, seconds_per_img, str(eta) + ), + n=5, + ) + + # Measure the time only for this worker (before the synchronization barrier) + total_time = time.perf_counter() - start_time + total_time_str = str(datetime.timedelta(seconds=total_time)) + # NOTE this format is parsed by grep + logger.info( + "Total inference time: {} ({:.6f} s / demo per device, on {} devices)".format( + total_time_str, total_time / (total - num_warmup), num_devices + ) + ) + total_compute_time_str = str(datetime.timedelta(seconds=int(total_compute_time))) + logger.info( + "Total inference pure compute time: {} ({:.6f} s / demo per device, on {} devices)".format( + total_compute_time_str, total_compute_time / (total - num_warmup), num_devices + ) + ) + + results = evaluator.evaluate() + # An evaluator may return None when not in main process. + # Replace it by an empty dict instead to make it easier for downstream code to handle + if results is None: + results = {} + return results + + +@contextmanager +def inference_context(model): + """ + A context where the model is temporarily changed to eval mode, + and restored to previous mode afterwards. + + Args: + model: a torch Module + """ + training_mode = model.training + model.eval() + yield + model.train(training_mode) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/lvis_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/lvis_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..e55f50fb9d1fa7ccb685f812b603c10f9a1ffea0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/lvis_evaluation.py @@ -0,0 +1,350 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import itertools +import json +import logging +import os +import pickle +from collections import OrderedDict +import torch +from fvcore.common.file_io import PathManager + +import detectron2.utils.comm as comm +from detectron2.data import MetadataCatalog +from detectron2.structures import Boxes, BoxMode, pairwise_iou +from detectron2.utils.logger import create_small_table + +from .coco_evaluation import instances_to_coco_json +from .evaluator import DatasetEvaluator + + +class LVISEvaluator(DatasetEvaluator): + """ + Evaluate object proposal and instance detection/segmentation outputs using + LVIS's metrics and evaluation API. + """ + + def __init__(self, dataset_name, cfg, distributed, output_dir=None): + """ + Args: + dataset_name (str): name of the dataset to be evaluated. + It must have the following corresponding metadata: + "json_file": the path to the LVIS format annotation + cfg (CfgNode): config instance + distributed (True): if True, will collect results from all ranks for evaluation. + Otherwise, will evaluate the results in the current process. + output_dir (str): optional, an output directory to dump results. + """ + from lvis import LVIS + + self._tasks = self._tasks_from_config(cfg) + self._distributed = distributed + self._output_dir = output_dir + + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + self._metadata = MetadataCatalog.get(dataset_name) + json_file = PathManager.get_local_path(self._metadata.json_file) + self._lvis_api = LVIS(json_file) + # Test set json files do not contain annotations (evaluation must be + # performed using the LVIS evaluation server). 
+ self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0 + + def reset(self): + self._predictions = [] + + def _tasks_from_config(self, cfg): + """ + Returns: + tuple[str]: tasks that can be evaluated under the given configuration. + """ + tasks = ("bbox",) + if cfg.MODEL.MASK_ON: + tasks = tasks + ("segm",) + return tasks + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a LVIS model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a LVIS model. It is a list of dicts with key + "instances" that contains :class:`Instances`. + """ + for input, output in zip(inputs, outputs): + prediction = {"image_id": input["image_id"]} + + if "instances" in output: + instances = output["instances"].to(self._cpu_device) + prediction["instances"] = instances_to_coco_json(instances, input["image_id"]) + if "proposals" in output: + prediction["proposals"] = output["proposals"].to(self._cpu_device) + self._predictions.append(prediction) + + def evaluate(self): + if self._distributed: + comm.synchronize() + predictions = comm.gather(self._predictions, dst=0) + predictions = list(itertools.chain(*predictions)) + + if not comm.is_main_process(): + return + else: + predictions = self._predictions + + if len(predictions) == 0: + self._logger.warning("[LVISEvaluator] Did not receive valid predictions.") + return {} + + if self._output_dir: + PathManager.mkdirs(self._output_dir) + file_path = os.path.join(self._output_dir, "instances_predictions.pth") + with PathManager.open(file_path, "wb") as f: + torch.save(predictions, f) + + self._results = OrderedDict() + if "proposals" in predictions[0]: + self._eval_box_proposals(predictions) + if "instances" in predictions[0]: + self._eval_predictions(set(self._tasks), predictions) + # Copy so the caller can do whatever with results + return copy.deepcopy(self._results) + + def _eval_predictions(self, tasks, predictions): + """ + Evaluate predictions on the given tasks. + Fill self._results with the metrics of the tasks. + + Args: + predictions (list[dict]): list of outputs from the model + """ + self._logger.info("Preparing results in the LVIS format ...") + lvis_results = list(itertools.chain(*[x["instances"] for x in predictions])) + + # LVIS evaluator can be used to evaluate results for COCO dataset categories. + # In this case `_metadata` variable will have a field with COCO-specific category mapping. 
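+        # Without that mapping, predictions carry contiguous 0-based category ids
+        # and only need the +1 shift below (LVIS category ids are 1-indexed).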
+ if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"): + reverse_id_mapping = { + v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items() + } + for result in lvis_results: + result["category_id"] = reverse_id_mapping[result["category_id"]] + else: + # unmap the category ids for LVIS (from 0-indexed to 1-indexed) + for result in lvis_results: + result["category_id"] += 1 + + if self._output_dir: + file_path = os.path.join(self._output_dir, "lvis_instances_results.json") + self._logger.info("Saving results to {}".format(file_path)) + with PathManager.open(file_path, "w") as f: + f.write(json.dumps(lvis_results)) + f.flush() + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating predictions ...") + for task in sorted(tasks): + res = _evaluate_predictions_on_lvis( + self._lvis_api, lvis_results, task, class_names=self._metadata.get("thing_classes") + ) + self._results[task] = res + + def _eval_box_proposals(self, predictions): + """ + Evaluate the box proposals in predictions. + Fill self._results with the metrics for "box_proposals" task. + """ + if self._output_dir: + # Saving generated box proposals to file. + # Predicted box_proposals are in XYXY_ABS mode. + bbox_mode = BoxMode.XYXY_ABS.value + ids, boxes, objectness_logits = [], [], [] + for prediction in predictions: + ids.append(prediction["image_id"]) + boxes.append(prediction["proposals"].proposal_boxes.tensor.numpy()) + objectness_logits.append(prediction["proposals"].objectness_logits.numpy()) + + proposal_data = { + "boxes": boxes, + "objectness_logits": objectness_logits, + "ids": ids, + "bbox_mode": bbox_mode, + } + with PathManager.open(os.path.join(self._output_dir, "box_proposals.pkl"), "wb") as f: + pickle.dump(proposal_data, f) + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating bbox proposals ...") + res = {} + areas = {"all": "", "small": "s", "medium": "m", "large": "l"} + for limit in [100, 1000]: + for area, suffix in areas.items(): + stats = _evaluate_box_proposals(predictions, self._lvis_api, area=area, limit=limit) + key = "AR{}@{:d}".format(suffix, limit) + res[key] = float(stats["ar"].item() * 100) + self._logger.info("Proposal metrics: \n" + create_small_table(res)) + self._results["box_proposals"] = res + + +# inspired from Detectron: +# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa +def _evaluate_box_proposals(dataset_predictions, lvis_api, thresholds=None, area="all", limit=None): + """ + Evaluate detection proposal recall metrics. This function is a much + faster alternative to the official LVIS API recall evaluation code. However, + it produces slightly different results. 
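+    Returns a dict with the average recall ("ar") along with the per-threshold
+    "recalls", "thresholds", "gt_overlaps" and "num_pos" used to compute it.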
+ """ + # Record max overlap value for each gt box + # Return vector of overlap values + areas = { + "all": 0, + "small": 1, + "medium": 2, + "large": 3, + "96-128": 4, + "128-256": 5, + "256-512": 6, + "512-inf": 7, + } + area_ranges = [ + [0 ** 2, 1e5 ** 2], # all + [0 ** 2, 32 ** 2], # small + [32 ** 2, 96 ** 2], # medium + [96 ** 2, 1e5 ** 2], # large + [96 ** 2, 128 ** 2], # 96-128 + [128 ** 2, 256 ** 2], # 128-256 + [256 ** 2, 512 ** 2], # 256-512 + [512 ** 2, 1e5 ** 2], + ] # 512-inf + assert area in areas, "Unknown area range: {}".format(area) + area_range = area_ranges[areas[area]] + gt_overlaps = [] + num_pos = 0 + + for prediction_dict in dataset_predictions: + predictions = prediction_dict["proposals"] + + # sort predictions in descending order + # TODO maybe remove this and make it explicit in the documentation + inds = predictions.objectness_logits.sort(descending=True)[1] + predictions = predictions[inds] + + ann_ids = lvis_api.get_ann_ids(img_ids=[prediction_dict["image_id"]]) + anno = lvis_api.load_anns(ann_ids) + gt_boxes = [ + BoxMode.convert(obj["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) for obj in anno + ] + gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4) # guard against no boxes + gt_boxes = Boxes(gt_boxes) + gt_areas = torch.as_tensor([obj["area"] for obj in anno]) + + if len(gt_boxes) == 0 or len(predictions) == 0: + continue + + valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1]) + gt_boxes = gt_boxes[valid_gt_inds] + + num_pos += len(gt_boxes) + + if len(gt_boxes) == 0: + continue + + if limit is not None and len(predictions) > limit: + predictions = predictions[:limit] + + overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes) + + _gt_overlaps = torch.zeros(len(gt_boxes)) + for j in range(min(len(predictions), len(gt_boxes))): + # find which proposal box maximally covers each gt box + # and get the iou amount of coverage for each gt box + max_overlaps, argmax_overlaps = overlaps.max(dim=0) + + # find which gt box is 'best' covered (i.e. 'best' = most iou) + gt_ovr, gt_ind = max_overlaps.max(dim=0) + assert gt_ovr >= 0 + # find the proposal box that covers the best covered gt box + box_ind = argmax_overlaps[gt_ind] + # record the iou coverage of this gt box + _gt_overlaps[j] = overlaps[box_ind, gt_ind] + assert _gt_overlaps[j] == gt_ovr + # mark the proposal box and the gt box as used + overlaps[box_ind, :] = -1 + overlaps[:, gt_ind] = -1 + + # append recorded iou coverage level + gt_overlaps.append(_gt_overlaps) + gt_overlaps = ( + torch.cat(gt_overlaps, dim=0) if len(gt_overlaps) else torch.zeros(0, dtype=torch.float32) + ) + gt_overlaps, _ = torch.sort(gt_overlaps) + + if thresholds is None: + step = 0.05 + thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32) + recalls = torch.zeros_like(thresholds) + # compute recall for each iou threshold + for i, t in enumerate(thresholds): + recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos) + # ar = 2 * np.trapz(recalls, thresholds) + ar = recalls.mean() + return { + "ar": ar, + "recalls": recalls, + "thresholds": thresholds, + "gt_overlaps": gt_overlaps, + "num_pos": num_pos, + } + + +def _evaluate_predictions_on_lvis(lvis_gt, lvis_results, iou_type, class_names=None): + """ + Args: + iou_type (str): + kpt_oks_sigmas (list[float]): + class_names (None or list[str]): if provided, will use it to predict + per-category AP. 
+ + Returns: + a dict of {metric name: score} + """ + metrics = { + "bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"], + "segm": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"], + }[iou_type] + + logger = logging.getLogger(__name__) + + if len(lvis_results) == 0: # TODO: check if needed + logger.warn("No predictions from the model!") + return {metric: float("nan") for metric in metrics} + + if iou_type == "segm": + lvis_results = copy.deepcopy(lvis_results) + # When evaluating mask AP, if the results contain bbox, LVIS API will + # use the box area as the area of the instance, instead of the mask area. + # This leads to a different definition of small/medium/large. + # We remove the bbox field to let mask AP use mask area. + for c in lvis_results: + c.pop("bbox", None) + + from lvis import LVISEval, LVISResults + + lvis_results = LVISResults(lvis_gt, lvis_results) + lvis_eval = LVISEval(lvis_gt, lvis_results, iou_type) + lvis_eval.run() + lvis_eval.print_results() + + # Pull the standard metrics from the LVIS results + results = lvis_eval.get_results() + results = {metric: float(results[metric] * 100) for metric in metrics} + logger.info("Evaluation results for {}: \n".format(iou_type) + create_small_table(results)) + return results diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/panoptic_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/panoptic_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..fb5e7ab87b1dd5bb3e0c5d1e405e321c48d9e6a0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/panoptic_evaluation.py @@ -0,0 +1,167 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import contextlib +import io +import itertools +import json +import logging +import os +import tempfile +from collections import OrderedDict +from fvcore.common.file_io import PathManager +from PIL import Image +from tabulate import tabulate + +from detectron2.data import MetadataCatalog +from detectron2.utils import comm + +from .evaluator import DatasetEvaluator + +logger = logging.getLogger(__name__) + + +class COCOPanopticEvaluator(DatasetEvaluator): + """ + Evaluate Panoptic Quality metrics on COCO using PanopticAPI. + It saves panoptic segmentation prediction in `output_dir` + + It contains a synchronize call and has to be called from all workers. + """ + + def __init__(self, dataset_name, output_dir): + """ + Args: + dataset_name (str): name of the dataset + output_dir (str): output directory to save results for evaluation + """ + self._metadata = MetadataCatalog.get(dataset_name) + self._thing_contiguous_id_to_dataset_id = { + v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items() + } + self._stuff_contiguous_id_to_dataset_id = { + v: k for k, v in self._metadata.stuff_dataset_id_to_contiguous_id.items() + } + + self._predictions_json = os.path.join(output_dir, "predictions.json") + + def reset(self): + self._predictions = [] + + def _convert_category_id(self, segment_info): + isthing = segment_info.pop("isthing", None) + if isthing is None: + # the model produces panoptic category id directly. 
No more conversion needed + return segment_info + if isthing is True: + segment_info["category_id"] = self._thing_contiguous_id_to_dataset_id[ + segment_info["category_id"] + ] + else: + segment_info["category_id"] = self._stuff_contiguous_id_to_dataset_id[ + segment_info["category_id"] + ] + return segment_info + + def process(self, inputs, outputs): + from panopticapi.utils import id2rgb + + for input, output in zip(inputs, outputs): + panoptic_img, segments_info = output["panoptic_seg"] + panoptic_img = panoptic_img.cpu().numpy() + + file_name = os.path.basename(input["file_name"]) + file_name_png = os.path.splitext(file_name)[0] + ".png" + with io.BytesIO() as out: + Image.fromarray(id2rgb(panoptic_img)).save(out, format="PNG") + segments_info = [self._convert_category_id(x) for x in segments_info] + self._predictions.append( + { + "image_id": input["image_id"], + "file_name": file_name_png, + "png_string": out.getvalue(), + "segments_info": segments_info, + } + ) + + def evaluate(self): + comm.synchronize() + + self._predictions = comm.gather(self._predictions) + self._predictions = list(itertools.chain(*self._predictions)) + if not comm.is_main_process(): + return + + # PanopticApi requires local files + gt_json = PathManager.get_local_path(self._metadata.panoptic_json) + gt_folder = PathManager.get_local_path(self._metadata.panoptic_root) + + with tempfile.TemporaryDirectory(prefix="panoptic_eval") as pred_dir: + logger.info("Writing all panoptic predictions to {} ...".format(pred_dir)) + for p in self._predictions: + with open(os.path.join(pred_dir, p["file_name"]), "wb") as f: + f.write(p.pop("png_string")) + + with open(gt_json, "r") as f: + json_data = json.load(f) + json_data["annotations"] = self._predictions + with PathManager.open(self._predictions_json, "w") as f: + f.write(json.dumps(json_data)) + + from panopticapi.evaluation import pq_compute + + with contextlib.redirect_stdout(io.StringIO()): + pq_res = pq_compute( + gt_json, + PathManager.get_local_path(self._predictions_json), + gt_folder=gt_folder, + pred_folder=pred_dir, + ) + + res = {} + res["PQ"] = 100 * pq_res["All"]["pq"] + res["SQ"] = 100 * pq_res["All"]["sq"] + res["RQ"] = 100 * pq_res["All"]["rq"] + res["PQ_th"] = 100 * pq_res["Things"]["pq"] + res["SQ_th"] = 100 * pq_res["Things"]["sq"] + res["RQ_th"] = 100 * pq_res["Things"]["rq"] + res["PQ_st"] = 100 * pq_res["Stuff"]["pq"] + res["SQ_st"] = 100 * pq_res["Stuff"]["sq"] + res["RQ_st"] = 100 * pq_res["Stuff"]["rq"] + + results = OrderedDict({"panoptic_seg": res}) + _print_panoptic_results(pq_res) + + return results + + +def _print_panoptic_results(pq_res): + headers = ["", "PQ", "SQ", "RQ", "#categories"] + data = [] + for name in ["All", "Things", "Stuff"]: + row = [name] + [pq_res[name][k] * 100 for k in ["pq", "sq", "rq"]] + [pq_res[name]["n"]] + data.append(row) + table = tabulate( + data, headers=headers, tablefmt="pipe", floatfmt=".3f", stralign="center", numalign="center" + ) + logger.info("Panoptic Evaluation Results:\n" + table) + + +if __name__ == "__main__": + from detectron2.utils.logger import setup_logger + + logger = setup_logger() + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--gt-json") + parser.add_argument("--gt-dir") + parser.add_argument("--pred-json") + parser.add_argument("--pred-dir") + args = parser.parse_args() + + from panopticapi.evaluation import pq_compute + + with contextlib.redirect_stdout(io.StringIO()): + pq_res = pq_compute( + args.gt_json, args.pred_json, gt_folder=args.gt_dir, 
pred_folder=args.pred_dir + ) + _print_panoptic_results(pq_res) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/pascal_voc_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/pascal_voc_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..22d2e523d23c695e06e5da5cb3a210a6d1945dfb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/pascal_voc_evaluation.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import logging +import numpy as np +import os +import tempfile +import xml.etree.ElementTree as ET +from collections import OrderedDict, defaultdict +from functools import lru_cache +import torch +from fvcore.common.file_io import PathManager + +from detectron2.data import MetadataCatalog +from detectron2.utils import comm + +from .evaluator import DatasetEvaluator + + +class PascalVOCDetectionEvaluator(DatasetEvaluator): + """ + Evaluate Pascal VOC AP. + It contains a synchronization, therefore has to be called from all ranks. + + Note that this is a rewrite of the official Matlab API. + The results should be similar, but not identical to the one produced by + the official API. + """ + + def __init__(self, dataset_name): + """ + Args: + dataset_name (str): name of the dataset, e.g., "voc_2007_test" + """ + self._dataset_name = dataset_name + meta = MetadataCatalog.get(dataset_name) + self._anno_file_template = os.path.join(meta.dirname, "Annotations", "{}.xml") + self._image_set_path = os.path.join(meta.dirname, "ImageSets", "Main", meta.split + ".txt") + self._class_names = meta.thing_classes + assert meta.year in [2007, 2012], meta.year + self._is_2007 = meta.year == 2007 + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + def reset(self): + self._predictions = defaultdict(list) # class name -> list of prediction strings + + def process(self, inputs, outputs): + for input, output in zip(inputs, outputs): + image_id = input["image_id"] + instances = output["instances"].to(self._cpu_device) + boxes = instances.pred_boxes.tensor.numpy() + scores = instances.scores.tolist() + classes = instances.pred_classes.tolist() + for box, score, cls in zip(boxes, scores, classes): + xmin, ymin, xmax, ymax = box + # The inverse of data loading logic in `data/pascal_voc.py` + xmin += 1 + ymin += 1 + self._predictions[cls].append( + f"{image_id} {score:.3f} {xmin:.1f} {ymin:.1f} {xmax:.1f} {ymax:.1f}" + ) + + def evaluate(self): + """ + Returns: + dict: has a key "segm", whose value is a dict of "AP", "AP50", and "AP75". + """ + all_predictions = comm.gather(self._predictions, dst=0) + if not comm.is_main_process(): + return + predictions = defaultdict(list) + for predictions_per_rank in all_predictions: + for clsid, lines in predictions_per_rank.items(): + predictions[clsid].extend(lines) + del all_predictions + + self._logger.info( + "Evaluating {} using {} metric. 
" + "Note that results do not use the official Matlab API.".format( + self._dataset_name, 2007 if self._is_2007 else 2012 + ) + ) + + with tempfile.TemporaryDirectory(prefix="pascal_voc_eval_") as dirname: + res_file_template = os.path.join(dirname, "{}.txt") + + aps = defaultdict(list) # iou -> ap per class + for cls_id, cls_name in enumerate(self._class_names): + lines = predictions.get(cls_id, [""]) + + with open(res_file_template.format(cls_name), "w") as f: + f.write("\n".join(lines)) + + for thresh in range(50, 100, 5): + rec, prec, ap = voc_eval( + res_file_template, + self._anno_file_template, + self._image_set_path, + cls_name, + ovthresh=thresh / 100.0, + use_07_metric=self._is_2007, + ) + aps[thresh].append(ap * 100) + + ret = OrderedDict() + mAP = {iou: np.mean(x) for iou, x in aps.items()} + ret["bbox"] = {"AP": np.mean(list(mAP.values())), "AP50": mAP[50], "AP75": mAP[75]} + return ret + + +############################################################################## +# +# Below code is modified from +# https://github.com/rbgirshick/py-faster-rcnn/blob/master/lib/datasets/voc_eval.py +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# Written by Bharath Hariharan +# -------------------------------------------------------- + +"""Python implementation of the PASCAL VOC devkit's AP evaluation code.""" + + +@lru_cache(maxsize=None) +def parse_rec(filename): + """Parse a PASCAL VOC xml file.""" + with PathManager.open(filename) as f: + tree = ET.parse(f) + objects = [] + for obj in tree.findall("object"): + obj_struct = {} + obj_struct["name"] = obj.find("name").text + obj_struct["pose"] = obj.find("pose").text + obj_struct["truncated"] = int(obj.find("truncated").text) + obj_struct["difficult"] = int(obj.find("difficult").text) + bbox = obj.find("bndbox") + obj_struct["bbox"] = [ + int(bbox.find("xmin").text), + int(bbox.find("ymin").text), + int(bbox.find("xmax").text), + int(bbox.find("ymax").text), + ] + objects.append(obj_struct) + + return objects + + +def voc_ap(rec, prec, use_07_metric=False): + """Compute VOC AP given precision and recall. If use_07_metric is true, uses + the VOC 07 11-point method (default:False). + """ + if use_07_metric: + # 11 point metric + ap = 0.0 + for t in np.arange(0.0, 1.1, 0.1): + if np.sum(rec >= t) == 0: + p = 0 + else: + p = np.max(prec[rec >= t]) + ap = ap + p / 11.0 + else: + # correct AP calculation + # first append sentinel values at the end + mrec = np.concatenate(([0.0], rec, [1.0])) + mpre = np.concatenate(([0.0], prec, [0.0])) + + # compute the precision envelope + for i in range(mpre.size - 1, 0, -1): + mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) + + # to calculate area under PR curve, look for points + # where X axis (recall) changes value + i = np.where(mrec[1:] != mrec[:-1])[0] + + # and sum (\Delta recall) * prec + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) + return ap + + +def voc_eval(detpath, annopath, imagesetfile, classname, ovthresh=0.5, use_07_metric=False): + """rec, prec, ap = voc_eval(detpath, + annopath, + imagesetfile, + classname, + [ovthresh], + [use_07_metric]) + + Top level function that does the PASCAL VOC evaluation. + + detpath: Path to detections + detpath.format(classname) should produce the detection results file. + annopath: Path to annotations + annopath.format(imagename) should be the xml annotations file. + imagesetfile: Text file containing the list of images, one image per line. 
+ classname: Category name (duh) + [ovthresh]: Overlap threshold (default = 0.5) + [use_07_metric]: Whether to use VOC07's 11 point AP computation + (default False) + """ + # assumes detections are in detpath.format(classname) + # assumes annotations are in annopath.format(imagename) + # assumes imagesetfile is a text file with each line an image name + + # first load gt + # read list of images + with PathManager.open(imagesetfile, "r") as f: + lines = f.readlines() + imagenames = [x.strip() for x in lines] + + # load annots + recs = {} + for imagename in imagenames: + recs[imagename] = parse_rec(annopath.format(imagename)) + + # extract gt objects for this class + class_recs = {} + npos = 0 + for imagename in imagenames: + R = [obj for obj in recs[imagename] if obj["name"] == classname] + bbox = np.array([x["bbox"] for x in R]) + difficult = np.array([x["difficult"] for x in R]).astype(np.bool) + # difficult = np.array([False for x in R]).astype(np.bool) # treat all "difficult" as GT + det = [False] * len(R) + npos = npos + sum(~difficult) + class_recs[imagename] = {"bbox": bbox, "difficult": difficult, "det": det} + + # read dets + detfile = detpath.format(classname) + with open(detfile, "r") as f: + lines = f.readlines() + + splitlines = [x.strip().split(" ") for x in lines] + image_ids = [x[0] for x in splitlines] + confidence = np.array([float(x[1]) for x in splitlines]) + BB = np.array([[float(z) for z in x[2:]] for x in splitlines]).reshape(-1, 4) + + # sort by confidence + sorted_ind = np.argsort(-confidence) + BB = BB[sorted_ind, :] + image_ids = [image_ids[x] for x in sorted_ind] + + # go down dets and mark TPs and FPs + nd = len(image_ids) + tp = np.zeros(nd) + fp = np.zeros(nd) + for d in range(nd): + R = class_recs[image_ids[d]] + bb = BB[d, :].astype(float) + ovmax = -np.inf + BBGT = R["bbox"].astype(float) + + if BBGT.size > 0: + # compute overlaps + # intersection + ixmin = np.maximum(BBGT[:, 0], bb[0]) + iymin = np.maximum(BBGT[:, 1], bb[1]) + ixmax = np.minimum(BBGT[:, 2], bb[2]) + iymax = np.minimum(BBGT[:, 3], bb[3]) + iw = np.maximum(ixmax - ixmin + 1.0, 0.0) + ih = np.maximum(iymax - iymin + 1.0, 0.0) + inters = iw * ih + + # union + uni = ( + (bb[2] - bb[0] + 1.0) * (bb[3] - bb[1] + 1.0) + + (BBGT[:, 2] - BBGT[:, 0] + 1.0) * (BBGT[:, 3] - BBGT[:, 1] + 1.0) + - inters + ) + + overlaps = inters / uni + ovmax = np.max(overlaps) + jmax = np.argmax(overlaps) + + if ovmax > ovthresh: + if not R["difficult"][jmax]: + if not R["det"][jmax]: + tp[d] = 1.0 + R["det"][jmax] = 1 + else: + fp[d] = 1.0 + else: + fp[d] = 1.0 + + # compute precision recall + fp = np.cumsum(fp) + tp = np.cumsum(tp) + rec = tp / float(npos) + # avoid divide by zero in case the first detection matches a difficult + # ground truth + prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps) + ap = voc_ap(rec, prec, use_07_metric) + + return rec, prec, ap diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/rotated_coco_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/rotated_coco_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..30746e1aaac9a1feb0c7994d9229423e9f04bb51 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/rotated_coco_evaluation.py @@ -0,0 +1,204 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import itertools +import json +import numpy as np +import os +import torch +from fvcore.common.file_io import PathManager +from pycocotools.cocoeval import COCOeval, maskUtils + +from detectron2.structures import BoxMode, RotatedBoxes, pairwise_iou_rotated + +from .coco_evaluation import COCOEvaluator + + +class RotatedCOCOeval(COCOeval): + @staticmethod + def is_rotated(box_list): + if type(box_list) == np.ndarray: + return box_list.shape[1] == 5 + elif type(box_list) == list: + if box_list == []: # cannot decide the box_dim + return False + return np.all( + np.array( + [ + (len(obj) == 5) and ((type(obj) == list) or (type(obj) == np.ndarray)) + for obj in box_list + ] + ) + ) + return False + + @staticmethod + def boxlist_to_tensor(boxlist, output_box_dim): + if type(boxlist) == np.ndarray: + box_tensor = torch.from_numpy(boxlist) + elif type(boxlist) == list: + if boxlist == []: + return torch.zeros((0, output_box_dim), dtype=torch.float32) + else: + box_tensor = torch.FloatTensor(boxlist) + else: + raise Exception("Unrecognized boxlist type") + + input_box_dim = box_tensor.shape[1] + if input_box_dim != output_box_dim: + if input_box_dim == 4 and output_box_dim == 5: + box_tensor = BoxMode.convert(box_tensor, BoxMode.XYWH_ABS, BoxMode.XYWHA_ABS) + else: + raise Exception( + "Unable to convert from {}-dim box to {}-dim box".format( + input_box_dim, output_box_dim + ) + ) + return box_tensor + + def compute_iou_dt_gt(self, dt, gt, is_crowd): + if self.is_rotated(dt) or self.is_rotated(gt): + # TODO: take is_crowd into consideration + assert all(c == 0 for c in is_crowd) + dt = RotatedBoxes(self.boxlist_to_tensor(dt, output_box_dim=5)) + gt = RotatedBoxes(self.boxlist_to_tensor(gt, output_box_dim=5)) + return pairwise_iou_rotated(dt, gt) + else: + # This is the same as the classical COCO evaluation + return maskUtils.iou(dt, gt, is_crowd) + + def computeIoU(self, imgId, catId): + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and len(dt) == 0: + return [] + inds = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in inds] + if len(dt) > p.maxDets[-1]: + dt = dt[0 : p.maxDets[-1]] + + assert p.iouType == "bbox", "unsupported iouType for iou computation" + + g = [g["bbox"] for g in gt] + d = [d["bbox"] for d in dt] + + # compute iou between each dt and gt region + iscrowd = [int(o["iscrowd"]) for o in gt] + + # Note: this function is copied from cocoeval.py in cocoapi + # and the major difference is here. + ious = self.compute_iou_dt_gt(d, g, iscrowd) + return ious + + +class RotatedCOCOEvaluator(COCOEvaluator): + """ + Evaluate object proposal/instance detection outputs using COCO-like metrics and APIs, + with rotated boxes support. + Note: this uses IOU only and does not consider angle differences. + """ + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a COCO model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a COCO model. It is a list of dicts with key + "instances" that contains :class:`Instances`. 
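Under the hood, overlaps between rotated predictions and ground truth are computed with detectron2's pairwise_iou_rotated (see compute_iou_dt_gt above) rather than pycocotools; a minimal standalone sketch, assuming boxes are given as (cx, cy, w, h, angle in degrees):

    import torch
    from detectron2.structures import RotatedBoxes, pairwise_iou_rotated

    # two XYWHA boxes with the same center and size, rotated by 0 and 30 degrees
    dt = RotatedBoxes(torch.tensor([[50.0, 50.0, 100.0, 40.0, 0.0]]))
    gt = RotatedBoxes(torch.tensor([[50.0, 50.0, 100.0, 40.0, 30.0]]))
    iou = pairwise_iou_rotated(dt, gt)  # (1, 1) matrix of rotated IoUs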
+ """ + for input, output in zip(inputs, outputs): + prediction = {"image_id": input["image_id"]} + + if "instances" in output: + instances = output["instances"].to(self._cpu_device) + + prediction["instances"] = self.instances_to_json(instances, input["image_id"]) + if "proposals" in output: + prediction["proposals"] = output["proposals"].to(self._cpu_device) + self._predictions.append(prediction) + + def instances_to_json(self, instances, img_id): + num_instance = len(instances) + if num_instance == 0: + return [] + + boxes = instances.pred_boxes.tensor.numpy() + if boxes.shape[1] == 4: + boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + boxes = boxes.tolist() + scores = instances.scores.tolist() + classes = instances.pred_classes.tolist() + + results = [] + for k in range(num_instance): + result = { + "image_id": img_id, + "category_id": classes[k], + "bbox": boxes[k], + "score": scores[k], + } + + results.append(result) + return results + + def _eval_predictions(self, tasks, predictions): + """ + Evaluate predictions on the given tasks. + Fill self._results with the metrics of the tasks. + """ + self._logger.info("Preparing results for COCO format ...") + coco_results = list(itertools.chain(*[x["instances"] for x in predictions])) + + # unmap the category ids for COCO + if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"): + reverse_id_mapping = { + v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items() + } + for result in coco_results: + result["category_id"] = reverse_id_mapping[result["category_id"]] + + if self._output_dir: + file_path = os.path.join(self._output_dir, "coco_instances_results.json") + self._logger.info("Saving results to {}".format(file_path)) + with PathManager.open(file_path, "w") as f: + f.write(json.dumps(coco_results)) + f.flush() + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating predictions ...") + for task in sorted(tasks): + assert task == "bbox", "Task {} is not supported".format(task) + coco_eval = ( + self._evaluate_predictions_on_coco(self._coco_api, coco_results) + if len(coco_results) > 0 + else None # cocoapi does not handle empty results very well + ) + + res = self._derive_coco_results( + coco_eval, task, class_names=self._metadata.get("thing_classes") + ) + self._results[task] = res + + def _evaluate_predictions_on_coco(self, coco_gt, coco_results): + """ + Evaluate the coco results using COCOEval API. + """ + assert len(coco_results) > 0 + + coco_dt = coco_gt.loadRes(coco_results) + + # Only bbox is supported for now + coco_eval = RotatedCOCOeval(coco_gt, coco_dt, iouType="bbox") + + coco_eval.evaluate() + coco_eval.accumulate() + coco_eval.summarize() + + return coco_eval diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/sem_seg_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/sem_seg_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..fb3b28d79284a5eeb335fc8ee8d859b4e46510ef --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/sem_seg_evaluation.py @@ -0,0 +1,168 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import itertools +import json +import logging +import numpy as np +import os +from collections import OrderedDict +import PIL.Image as Image +import pycocotools.mask as mask_util +import torch +from fvcore.common.file_io import PathManager + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.utils.comm import all_gather, is_main_process, synchronize + +from .evaluator import DatasetEvaluator + + +class SemSegEvaluator(DatasetEvaluator): + """ + Evaluate semantic segmentation + """ + + def __init__(self, dataset_name, distributed, num_classes, ignore_label=255, output_dir=None): + """ + Args: + dataset_name (str): name of the dataset to be evaluated. + distributed (True): if True, will collect results from all ranks for evaluation. + Otherwise, will evaluate the results in the current process. + num_classes (int): number of classes + ignore_label (int): value in semantic segmentation ground truth. Predictions for the + corresponding pixels should be ignored. + output_dir (str): an output directory to dump results. + """ + self._dataset_name = dataset_name + self._distributed = distributed + self._output_dir = output_dir + self._num_classes = num_classes + self._ignore_label = ignore_label + self._N = num_classes + 1 + + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + self.input_file_to_gt_file = { + dataset_record["file_name"]: dataset_record["sem_seg_file_name"] + for dataset_record in DatasetCatalog.get(dataset_name) + } + + meta = MetadataCatalog.get(dataset_name) + # Dict that maps contiguous training ids to COCO category ids + try: + c2d = meta.stuff_dataset_id_to_contiguous_id + self._contiguous_id_to_dataset_id = {v: k for k, v in c2d.items()} + except AttributeError: + self._contiguous_id_to_dataset_id = None + self._class_names = meta.stuff_classes + + def reset(self): + self._conf_matrix = np.zeros((self._N, self._N), dtype=np.int64) + self._predictions = [] + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a model. + It is a list of dicts. Each dict corresponds to an image and + contains keys like "height", "width", "file_name". + outputs: the outputs of a model. It is either list of semantic segmentation predictions + (Tensor [H, W]) or list of dicts with key "sem_seg" that contains semantic + segmentation prediction in the same format. 
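The accumulation in the loop below collapses each (prediction, ground truth) pixel pair into one confusion-matrix cell via np.bincount; a minimal standalone sketch of that update, assuming 3 classes plus one extra bucket for ignored pixels:

    import numpy as np

    N = 4                                      # num_classes + 1 (extra bucket for ignored pixels)
    pred = np.array([0, 1, 2, 1])              # predicted class per pixel
    gt = np.array([0, 1, 2, 3])                # ground truth; 3 is the remapped ignore label
    conf = np.bincount(N * pred + gt, minlength=N ** 2).reshape(N, N)
    # conf[i, j] counts pixels predicted as class i whose ground truth is class j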
+ """ + for input, output in zip(inputs, outputs): + output = output["sem_seg"].argmax(dim=0).to(self._cpu_device) + pred = np.array(output, dtype=np.int) + with PathManager.open(self.input_file_to_gt_file[input["file_name"]], "rb") as f: + gt = np.array(Image.open(f), dtype=np.int) + + gt[gt == self._ignore_label] = self._num_classes + + self._conf_matrix += np.bincount( + self._N * pred.reshape(-1) + gt.reshape(-1), minlength=self._N ** 2 + ).reshape(self._N, self._N) + + self._predictions.extend(self.encode_json_sem_seg(pred, input["file_name"])) + + def evaluate(self): + """ + Evaluates standard semantic segmentation metrics (http://cocodataset.org/#stuff-eval): + + * Mean intersection-over-union averaged across classes (mIoU) + * Frequency Weighted IoU (fwIoU) + * Mean pixel accuracy averaged across classes (mACC) + * Pixel Accuracy (pACC) + """ + if self._distributed: + synchronize() + conf_matrix_list = all_gather(self._conf_matrix) + self._predictions = all_gather(self._predictions) + self._predictions = list(itertools.chain(*self._predictions)) + if not is_main_process(): + return + + self._conf_matrix = np.zeros_like(self._conf_matrix) + for conf_matrix in conf_matrix_list: + self._conf_matrix += conf_matrix + + if self._output_dir: + PathManager.mkdirs(self._output_dir) + file_path = os.path.join(self._output_dir, "sem_seg_predictions.json") + with PathManager.open(file_path, "w") as f: + f.write(json.dumps(self._predictions)) + + acc = np.full(self._num_classes, np.nan, dtype=np.float) + iou = np.full(self._num_classes, np.nan, dtype=np.float) + tp = self._conf_matrix.diagonal()[:-1].astype(np.float) + pos_gt = np.sum(self._conf_matrix[:-1, :-1], axis=0).astype(np.float) + class_weights = pos_gt / np.sum(pos_gt) + pos_pred = np.sum(self._conf_matrix[:-1, :-1], axis=1).astype(np.float) + acc_valid = pos_gt > 0 + acc[acc_valid] = tp[acc_valid] / pos_gt[acc_valid] + iou_valid = (pos_gt + pos_pred) > 0 + union = pos_gt + pos_pred - tp + iou[acc_valid] = tp[acc_valid] / union[acc_valid] + macc = np.sum(acc[acc_valid]) / np.sum(acc_valid) + miou = np.sum(iou[acc_valid]) / np.sum(iou_valid) + fiou = np.sum(iou[acc_valid] * class_weights[acc_valid]) + pacc = np.sum(tp) / np.sum(pos_gt) + + res = {} + res["mIoU"] = 100 * miou + res["fwIoU"] = 100 * fiou + for i, name in enumerate(self._class_names): + res["IoU-{}".format(name)] = 100 * iou[i] + res["mACC"] = 100 * macc + res["pACC"] = 100 * pacc + for i, name in enumerate(self._class_names): + res["ACC-{}".format(name)] = 100 * acc[i] + + if self._output_dir: + file_path = os.path.join(self._output_dir, "sem_seg_evaluation.pth") + with PathManager.open(file_path, "wb") as f: + torch.save(res, f) + results = OrderedDict({"sem_seg": res}) + self._logger.info(results) + return results + + def encode_json_sem_seg(self, sem_seg, input_file_name): + """ + Convert semantic segmentation to COCO stuff format with segments encoded as RLEs. 
+ See http://cocodataset.org/#format-results + """ + json_list = [] + for label in np.unique(sem_seg): + if self._contiguous_id_to_dataset_id is not None: + assert ( + label in self._contiguous_id_to_dataset_id + ), "Label {} is not in the metadata info for {}".format(label, self._dataset_name) + dataset_id = self._contiguous_id_to_dataset_id[label] + else: + dataset_id = int(label) + mask = (sem_seg == label).astype(np.uint8) + mask_rle = mask_util.encode(np.array(mask[:, :, None], order="F"))[0] + mask_rle["counts"] = mask_rle["counts"].decode("utf-8") + json_list.append( + {"file_name": input_file_name, "category_id": dataset_id, "segmentation": mask_rle} + ) + return json_list diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/testing.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/testing.py new file mode 100644 index 0000000000000000000000000000000000000000..95addebc185111c572cb19aa98f7e055b21fc74e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/evaluation/testing.py @@ -0,0 +1,78 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import numpy as np +import pprint +import sys +from collections import OrderedDict +from collections.abc import Mapping + + +def print_csv_format(results): + """ + Print main metrics in a format similar to Detectron, + so that they are easy to copypaste into a spreadsheet. + + Args: + results (OrderedDict[dict]): task_name -> {metric -> score} + """ + assert isinstance(results, OrderedDict), results # unordered results cannot be properly printed + logger = logging.getLogger(__name__) + for task, res in results.items(): + # Don't print "AP-category" metrics since they are usually not tracked. + important_res = [(k, v) for k, v in res.items() if "-" not in k] + logger.info("copypaste: Task: {}".format(task)) + logger.info("copypaste: " + ",".join([k[0] for k in important_res])) + logger.info("copypaste: " + ",".join(["{0:.4f}".format(k[1]) for k in important_res])) + + +def verify_results(cfg, results): + """ + Args: + results (OrderedDict[dict]): task_name -> {metric -> score} + + Returns: + bool: whether the verification succeeds or not + """ + expected_results = cfg.TEST.EXPECTED_RESULTS + if not len(expected_results): + return True + + ok = True + for task, metric, expected, tolerance in expected_results: + actual = results[task][metric] + if not np.isfinite(actual): + ok = False + diff = abs(actual - expected) + if diff > tolerance: + ok = False + + logger = logging.getLogger(__name__) + if not ok: + logger.error("Result verification failed!") + logger.error("Expected Results: " + str(expected_results)) + logger.error("Actual Results: " + pprint.pformat(results)) + + sys.exit(1) + else: + logger.info("Results verification passed.") + return ok + + +def flatten_results_dict(results): + """ + Expand a hierarchical dict of scalars into a flat dict of scalars. + If results[k1][k2][k3] = v, the returned dict will have the entry + {"k1/k2/k3": v}. 
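For example (values made up for illustration), a nested result dict flattens into slash-joined keys:

    nested = {"bbox": {"AP": 39.4, "AP50": 61.2}, "segm": {"AP": 35.1}}
    flat = flatten_results_dict(nested)
    # flat == {"bbox/AP": 39.4, "bbox/AP50": 61.2, "segm/AP": 35.1}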
+ + Args: + results (dict): + """ + r = {} + for k, v in results.items(): + if isinstance(v, Mapping): + v = flatten_results_dict(v) + for kk, vv in v.items(): + r[k + "/" + kk] = vv + else: + r[k] = v + return r diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9bd8b57c1a5f15e391eb63b690f1051b1ad79d21 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/README.md @@ -0,0 +1,10 @@ + +This directory contains code to prepare a detectron2 model for deployment. +Currently it supports exporting a detectron2 model to Caffe2 format through ONNX. + +Please see [documentation](https://detectron2.readthedocs.io/tutorials/deployment.html) for its usage. + + +### Acknowledgements + +Thanks to Mobile Vision team at Facebook for developing the conversion tools. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1e2bf4d0670ed0ccd73dbdb7ce27a8e617bbf6aa --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- + +from .api import * + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/api.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/api.py new file mode 100644 index 0000000000000000000000000000000000000000..a7600714e1edb019def04f9d0d1a063668943101 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/api.py @@ -0,0 +1,277 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +import copy +import logging +import os +import torch +from caffe2.proto import caffe2_pb2 +from torch import nn + +from detectron2.config import CfgNode as CN + +from .caffe2_export import export_caffe2_detection_model +from .caffe2_export import export_onnx_model as export_onnx_model_impl +from .caffe2_export import run_and_save_graph +from .caffe2_inference import ProtobufDetectionModel +from .caffe2_modeling import META_ARCH_CAFFE2_EXPORT_TYPE_MAP, convert_batched_inputs_to_c2_format +from .shared import get_pb_arg_vali, get_pb_arg_vals, save_graph + +__all__ = [ + "add_export_config", + "export_caffe2_model", + "Caffe2Model", + "export_onnx_model", + "Caffe2Tracer", +] + + +def add_export_config(cfg): + """ + Args: + cfg (CfgNode): a detectron2 config + + Returns: + CfgNode: an updated config with new options that will be used + by :class:`Caffe2Tracer`. + """ + is_frozen = cfg.is_frozen() + cfg.defrost() + cfg.EXPORT_CAFFE2 = CN() + cfg.EXPORT_CAFFE2.USE_HEATMAP_MAX_KEYPOINT = False + if is_frozen: + cfg.freeze() + return cfg + + +class Caffe2Tracer: + """ + Make a detectron2 model traceable with caffe2 style. + + An original detectron2 model may not be traceable, or + cannot be deployed directly after being traced, due to some reasons: + 1. control flow in some ops + 2. custom ops + 3. complicated pre/post processing + + This class provides a traceable version of a detectron2 model by: + 1. 
Rewrite parts of the model using ops in caffe2. Note that some ops do + not have GPU implementation. + 2. Define the inputs "after pre-processing" as inputs to the model + 3. Remove post-processing and produce raw layer outputs + + More specifically about inputs: all builtin models take two input tensors. + (1) NCHW float "data" which is an image (usually in [0, 255]) + (2) Nx3 float "im_info", each row of which is (height, width, 1.0) + + After making a traceable model, the class provide methods to export such a + model to different deployment formats. + + The class currently only supports models using builtin meta architectures. + """ + + def __init__(self, cfg, model, inputs): + """ + Args: + cfg (CfgNode): a detectron2 config, with extra export-related options + added by :func:`add_export_config`. + model (nn.Module): a model built by + :func:`detectron2.modeling.build_model`. + inputs: sample inputs that the given model takes for inference. + Will be used to trace the model. + """ + assert isinstance(cfg, CN), cfg + assert isinstance(model, torch.nn.Module), type(model) + if "EXPORT_CAFFE2" not in cfg: + cfg = add_export_config(cfg) # will just the defaults + + self.cfg = cfg + self.model = model + self.inputs = inputs + + def _get_traceable(self): + # TODO how to make it extensible to support custom models + C2MetaArch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[self.cfg.MODEL.META_ARCHITECTURE] + traceable_model = C2MetaArch(self.cfg, copy.deepcopy(self.model)) + traceable_inputs = traceable_model.get_caffe2_inputs(self.inputs) + return traceable_model, traceable_inputs + + def export_caffe2(self): + """ + Export the model to Caffe2's protobuf format. + The returned object can be saved with `.save_protobuf()` method. + The result can be loaded and executed using Caffe2 runtime. + + Returns: + Caffe2Model + """ + model, inputs = self._get_traceable() + predict_net, init_net = export_caffe2_detection_model(model, inputs) + return Caffe2Model(predict_net, init_net) + + def export_onnx(self): + """ + Export the model to ONNX format. + Note that the exported model contains custom ops only available in caffe2, therefore it + cannot be directly executed by other runtime. Post-processing or transformation passes + may be applied on the model to accommodate different runtimes. + + Returns: + onnx.ModelProto: an onnx model. + """ + model, inputs = self._get_traceable() + return export_onnx_model_impl(model, (inputs,)) + + def export_torchscript(self): + """ + Export the model to a `torch.jit.TracedModule` by tracing. + The returned object can be saved to a file by ".save()". + + Returns: + torch.jit.TracedModule: a torch TracedModule + """ + model, inputs = self._get_traceable() + logger = logging.getLogger(__name__) + logger.info("Tracing the model with torch.jit.trace ...") + with torch.no_grad(): + return torch.jit.trace(model, (inputs,), optimize=True) + + +def export_caffe2_model(cfg, model, inputs): + """ + Export a detectron2 model to caffe2 format. + + Args: + cfg (CfgNode): a detectron2 config, with extra export-related options + added by :func:`add_export_config`. + model (nn.Module): a model built by + :func:`detectron2.modeling.build_model`. + It will be modified by this function. + inputs: sample inputs that the given model takes for inference. + Will be used to trace the model. + + Returns: + Caffe2Model + """ + return Caffe2Tracer(cfg, model, inputs).export_caffe2() + + +def export_onnx_model(cfg, model, inputs): + """ + Export a detectron2 model to ONNX format. 
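A minimal usage sketch, assuming `cfg`, a built `model`, and sample `inputs` are already prepared (this helper is equivalent to Caffe2Tracer(cfg, model, inputs).export_onnx()):

    import onnx

    cfg = add_export_config(cfg)           # adds the EXPORT_CAFFE2 options consulted during tracing
    model.eval()
    onnx_model = export_onnx_model(cfg, model, inputs)
    onnx.save(onnx_model, "model.onnx")    # persisting the proto is left to the caller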
+ Note that the exported model contains custom ops only available in caffe2, therefore it + cannot be directly executed by other runtime. Post-processing or transformation passes + may be applied on the model to accommodate different runtimes. + Args: + cfg (CfgNode): a detectron2 config, with extra export-related options + added by :func:`add_export_config`. + model (nn.Module): a model built by + :func:`detectron2.modeling.build_model`. + It will be modified by this function. + inputs: sample inputs that the given model takes for inference. + Will be used to trace the model. + Returns: + onnx.ModelProto: an onnx model. + """ + return Caffe2Tracer(cfg, model, inputs).export_onnx() + + +class Caffe2Model(nn.Module): + """ + A wrapper around the traced model in caffe2's pb format. + """ + + def __init__(self, predict_net, init_net): + super().__init__() + self.eval() # always in eval mode + self._predict_net = predict_net + self._init_net = init_net + self._predictor = None + + @property + def predict_net(self): + """ + Returns: + core.Net: the underlying caffe2 predict net + """ + return self._predict_net + + @property + def init_net(self): + """ + Returns: + core.Net: the underlying caffe2 init net + """ + return self._init_net + + __init__.__HIDE_SPHINX_DOC__ = True + + def save_protobuf(self, output_dir): + """ + Save the model as caffe2's protobuf format. + + Args: + output_dir (str): the output directory to save protobuf files. + """ + logger = logging.getLogger(__name__) + logger.info("Saving model to {} ...".format(output_dir)) + os.makedirs(output_dir, exist_ok=True) + + with open(os.path.join(output_dir, "model.pb"), "wb") as f: + f.write(self._predict_net.SerializeToString()) + with open(os.path.join(output_dir, "model.pbtxt"), "w") as f: + f.write(str(self._predict_net)) + with open(os.path.join(output_dir, "model_init.pb"), "wb") as f: + f.write(self._init_net.SerializeToString()) + + def save_graph(self, output_file, inputs=None): + """ + Save the graph as SVG format. + + Args: + output_file (str): a SVG file + inputs: optional inputs given to the model. + If given, the inputs will be used to run the graph to record + shape of every tensor. The shape information will be + saved together with the graph. + """ + if inputs is None: + save_graph(self._predict_net, output_file, op_only=False) + else: + size_divisibility = get_pb_arg_vali(self._predict_net, "size_divisibility", 0) + device = get_pb_arg_vals(self._predict_net, "device", b"cpu").decode("ascii") + inputs = convert_batched_inputs_to_c2_format(inputs, size_divisibility, device) + inputs = [x.cpu().numpy() for x in inputs] + run_and_save_graph(self._predict_net, self._init_net, inputs, output_file) + + @staticmethod + def load_protobuf(dir): + """ + Args: + dir (str): a directory used to save Caffe2Model with + :meth:`save_protobuf`. + The files "model.pb" and "model_init.pb" are needed. + + Returns: + Caffe2Model: the caffe2 model loaded from this directory. + """ + predict_net = caffe2_pb2.NetDef() + with open(os.path.join(dir, "model.pb"), "rb") as f: + predict_net.ParseFromString(f.read()) + + init_net = caffe2_pb2.NetDef() + with open(os.path.join(dir, "model_init.pb"), "rb") as f: + init_net.ParseFromString(f.read()) + + return Caffe2Model(predict_net, init_net) + + def __call__(self, inputs): + """ + An interface that wraps around a caffe2 model and mimics detectron2's models' + input & output format. This is used to compare the outputs of caffe2 model + with its original torch model. 
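A sketch of the intended round trip, assuming `predict_net`/`init_net` came from export_caffe2() and `batched_inputs` follow detectron2's usual input format:

    c2_model = Caffe2Model(predict_net, init_net)
    c2_model.save_protobuf("./caffe2_model")               # writes model.pb, model.pbtxt, model_init.pb
    reloaded = Caffe2Model.load_protobuf("./caffe2_model")
    outputs = reloaded(batched_inputs)                     # mimics the original torch model's outputs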
+ + Due to the extra conversion between torch/caffe2, + this method is not meant for benchmark. + """ + if self._predictor is None: + self._predictor = ProtobufDetectionModel(self._predict_net, self._init_net) + return self._predictor(inputs) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/c10.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/c10.py new file mode 100644 index 0000000000000000000000000000000000000000..6e3cbe3ce94d0c56596c645b8c85592ed5d31fe1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/c10.py @@ -0,0 +1,503 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import math +import torch +import torch.nn.functional as F + +from detectron2.layers import cat +from detectron2.layers.roi_align_rotated import ROIAlignRotated +from detectron2.modeling import poolers +from detectron2.modeling.proposal_generator import rpn +from detectron2.modeling.roi_heads.mask_head import mask_rcnn_inference +from detectron2.structures import Boxes, ImageList, Instances, Keypoints + +from .shared import alias, to_device + + +""" +This file contains caffe2-compatible implementation of several detectrno2 components. +""" + + +class Caffe2Boxes(Boxes): + """ + Representing a list of detectron2.structures.Boxes from minibatch, each box + is represented by a 5d vector (batch index + 4 coordinates), or a 6d vector + (batch index + 5 coordinates) for RotatedBoxes. + """ + + def __init__(self, tensor): + assert isinstance(tensor, torch.Tensor) + assert tensor.dim() == 2 and tensor.size(-1) in [4, 5, 6], tensor.size() + # TODO: make tensor immutable when dim is Nx5 for Boxes, + # and Nx6 for RotatedBoxes? + self.tensor = tensor + + +# TODO clean up this class, maybe just extend Instances +class InstancesList(object): + """ + Tensor representation of a list of Instances object for a batch of images. + + When dealing with a batch of images with Caffe2 ops, a list of bboxes + (instances) are usually represented by single Tensor with size + (sigma(Ni), 5) or (sigma(Ni), 4) plus a batch split Tensor. This class is + for providing common functions to convert between these two representations. + """ + + def __init__(self, im_info, indices, extra_fields=None): + # [N, 3] -> (H, W, Scale) + self.im_info = im_info + # [N,] -> indice of batch to which the instance belongs + self.indices = indices + # [N, ...] 
+ self.batch_extra_fields = extra_fields or {} + + self.image_size = self.im_info + + def get_fields(self): + """ like `get_fields` in the Instances object, + but return each field in tensor representations """ + ret = {} + for k, v in self.batch_extra_fields.items(): + # if isinstance(v, torch.Tensor): + # tensor_rep = v + # elif isinstance(v, (Boxes, Keypoints)): + # tensor_rep = v.tensor + # else: + # raise ValueError("Can't find tensor representation for: {}".format()) + ret[k] = v + return ret + + def has(self, name): + return name in self.batch_extra_fields + + def set(self, name, value): + data_len = len(value) + if len(self.batch_extra_fields): + assert ( + len(self) == data_len + ), "Adding a field of length {} to a Instances of length {}".format(data_len, len(self)) + self.batch_extra_fields[name] = value + + def __setattr__(self, name, val): + if name in ["im_info", "indices", "batch_extra_fields", "image_size"]: + super().__setattr__(name, val) + else: + self.set(name, val) + + def __getattr__(self, name): + if name not in self.batch_extra_fields: + raise AttributeError("Cannot find field '{}' in the given Instances!".format(name)) + return self.batch_extra_fields[name] + + def __len__(self): + return len(self.indices) + + def flatten(self): + ret = [] + for _, v in self.batch_extra_fields.items(): + if isinstance(v, (Boxes, Keypoints)): + ret.append(v.tensor) + else: + ret.append(v) + return ret + + @staticmethod + def to_d2_instances_list(instances_list): + """ + Convert InstancesList to List[Instances]. The input `instances_list` can + also be a List[Instances], in this case this method is a non-op. + """ + if not isinstance(instances_list, InstancesList): + assert all(isinstance(x, Instances) for x in instances_list) + return instances_list + + ret = [] + for i, info in enumerate(instances_list.im_info): + instances = Instances(torch.Size([int(info[0].item()), int(info[1].item())])) + + ids = instances_list.indices == i + for k, v in instances_list.batch_extra_fields.items(): + if isinstance(v, torch.Tensor): + instances.set(k, v[ids]) + continue + elif isinstance(v, Boxes): + instances.set(k, v[ids, -4:]) + continue + + target_type, tensor_source = v + assert isinstance(tensor_source, torch.Tensor) + assert tensor_source.shape[0] == instances_list.indices.shape[0] + tensor_source = tensor_source[ids] + + if issubclass(target_type, Boxes): + instances.set(k, Boxes(tensor_source[:, -4:])) + elif issubclass(target_type, Keypoints): + instances.set(k, Keypoints(tensor_source)) + elif issubclass(target_type, torch.Tensor): + instances.set(k, tensor_source) + else: + raise ValueError("Can't handle targe type: {}".format(target_type)) + + ret.append(instances) + return ret + + +class Caffe2Compatible(object): + def _get_tensor_mode(self): + return self._tensor_mode + + def _set_tensor_mode(self, v): + self._tensor_mode = v + + tensor_mode = property(_get_tensor_mode, _set_tensor_mode) + """ + If true, the model expects C2-style tensor only inputs/outputs format. 
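Caffe2RPN.forward below (in its non-tensor-mode branch) builds this C2-style metadata as an (N, 3) im_info tensor of (height, width, scale) rows; a minimal sketch with made-up image sizes:

    import torch

    image_sizes = [(480, 640), (600, 800)]                 # (height, width) of each image in the batch
    im_info = torch.tensor([[h, w, 1.0] for h, w in image_sizes])
    # shape (2, 3); this is the layout consumed by GenerateProposals and BBoxTransform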
+ """ + + +class Caffe2RPN(Caffe2Compatible, rpn.RPN): + def forward(self, images, features, gt_instances=None): + assert not self.training + + features = [features[f] for f in self.in_features] + objectness_logits_pred, anchor_deltas_pred = self.rpn_head(features) + + assert isinstance(images, ImageList) + if self.tensor_mode: + im_info = images.image_sizes + else: + im_info = torch.Tensor( + [[im_sz[0], im_sz[1], torch.Tensor([1.0])] for im_sz in images.image_sizes] + ).to(images.tensor.device) + assert isinstance(im_info, torch.Tensor) + + rpn_rois_list = [] + rpn_roi_probs_list = [] + for scores, bbox_deltas, cell_anchors_tensor, feat_stride in zip( + objectness_logits_pred, + anchor_deltas_pred, + iter(self.anchor_generator.cell_anchors), + self.anchor_generator.strides, + ): + scores = scores.detach() + bbox_deltas = bbox_deltas.detach() + + rpn_rois, rpn_roi_probs = torch.ops._caffe2.GenerateProposals( + scores, + bbox_deltas, + im_info, + cell_anchors_tensor, + spatial_scale=1.0 / feat_stride, + pre_nms_topN=self.pre_nms_topk[self.training], + post_nms_topN=self.post_nms_topk[self.training], + nms_thresh=self.nms_thresh, + min_size=self.min_box_side_len, + # correct_transform_coords=True, # deprecated argument + angle_bound_on=True, # Default + angle_bound_lo=-180, + angle_bound_hi=180, + clip_angle_thresh=1.0, # Default + legacy_plus_one=False, + ) + rpn_rois_list.append(rpn_rois) + rpn_roi_probs_list.append(rpn_roi_probs) + + # For FPN in D2, in RPN all proposals from different levels are concated + # together, ranked and picked by top post_nms_topk. Then in ROIPooler + # it calculates level_assignments and calls the RoIAlign from + # the corresponding level. + + if len(objectness_logits_pred) == 1: + rpn_rois = rpn_rois_list[0] + rpn_roi_probs = rpn_roi_probs_list[0] + else: + assert len(rpn_rois_list) == len(rpn_roi_probs_list) + rpn_post_nms_topN = self.post_nms_topk[self.training] + + device = rpn_rois_list[0].device + input_list = [to_device(x, "cpu") for x in (rpn_rois_list + rpn_roi_probs_list)] + + # TODO remove this after confirming rpn_max_level/rpn_min_level + # is not needed in CollectRpnProposals. + feature_strides = list(self.anchor_generator.strides) + rpn_min_level = int(math.log2(feature_strides[0])) + rpn_max_level = int(math.log2(feature_strides[-1])) + assert (rpn_max_level - rpn_min_level + 1) == len( + rpn_rois_list + ), "CollectRpnProposals requires continuous levels" + + rpn_rois = torch.ops._caffe2.CollectRpnProposals( + input_list, + # NOTE: in current implementation, rpn_max_level and rpn_min_level + # are not needed, only the subtraction of two matters and it + # can be infer from the number of inputs. Keep them now for + # consistency. 
+ rpn_max_level=2 + len(rpn_rois_list) - 1, + rpn_min_level=2, + rpn_post_nms_topN=rpn_post_nms_topN, + ) + rpn_rois = to_device(rpn_rois, device) + rpn_roi_probs = [] + + proposals = self.c2_postprocess(im_info, rpn_rois, rpn_roi_probs, self.tensor_mode) + return proposals, {} + + @staticmethod + def c2_postprocess(im_info, rpn_rois, rpn_roi_probs, tensor_mode): + proposals = InstancesList( + im_info=im_info, + indices=rpn_rois[:, 0], + extra_fields={ + "proposal_boxes": Caffe2Boxes(rpn_rois), + "objectness_logits": (torch.Tensor, rpn_roi_probs), + }, + ) + if not tensor_mode: + proposals = InstancesList.to_d2_instances_list(proposals) + else: + proposals = [proposals] + return proposals + + +class Caffe2ROIPooler(Caffe2Compatible, poolers.ROIPooler): + @staticmethod + def c2_preprocess(box_lists): + assert all(isinstance(x, Boxes) for x in box_lists) + if all(isinstance(x, Caffe2Boxes) for x in box_lists): + # input is pure-tensor based + assert len(box_lists) == 1 + pooler_fmt_boxes = box_lists[0].tensor + else: + pooler_fmt_boxes = poolers.convert_boxes_to_pooler_format(box_lists) + return pooler_fmt_boxes + + def forward(self, x, box_lists): + assert not self.training + + pooler_fmt_boxes = self.c2_preprocess(box_lists) + num_level_assignments = len(self.level_poolers) + + if num_level_assignments == 1: + if isinstance(self.level_poolers[0], ROIAlignRotated): + c2_roi_align = torch.ops._caffe2.RoIAlignRotated + aligned = True + else: + c2_roi_align = torch.ops._caffe2.RoIAlign + aligned = self.level_poolers[0].aligned + + out = c2_roi_align( + x[0], + pooler_fmt_boxes, + order="NCHW", + spatial_scale=float(self.level_poolers[0].spatial_scale), + pooled_h=int(self.output_size[0]), + pooled_w=int(self.output_size[1]), + sampling_ratio=int(self.level_poolers[0].sampling_ratio), + aligned=aligned, + ) + return out + + device = pooler_fmt_boxes.device + assert ( + self.max_level - self.min_level + 1 == 4 + ), "Currently DistributeFpnProposals only support 4 levels" + fpn_outputs = torch.ops._caffe2.DistributeFpnProposals( + to_device(pooler_fmt_boxes, "cpu"), + roi_canonical_scale=self.canonical_box_size, + roi_canonical_level=self.canonical_level, + roi_max_level=self.max_level, + roi_min_level=self.min_level, + legacy_plus_one=False, + ) + fpn_outputs = [to_device(x, device) for x in fpn_outputs] + + rois_fpn_list = fpn_outputs[:-1] + rois_idx_restore_int32 = fpn_outputs[-1] + + roi_feat_fpn_list = [] + for roi_fpn, x_level, pooler in zip(rois_fpn_list, x, self.level_poolers): + if isinstance(pooler, ROIAlignRotated): + c2_roi_align = torch.ops._caffe2.RoIAlignRotated + aligned = True + else: + c2_roi_align = torch.ops._caffe2.RoIAlign + aligned = bool(pooler.aligned) + + roi_feat_fpn = c2_roi_align( + x_level, + roi_fpn, + order="NCHW", + spatial_scale=float(pooler.spatial_scale), + pooled_h=int(self.output_size[0]), + pooled_w=int(self.output_size[1]), + sampling_ratio=int(pooler.sampling_ratio), + aligned=aligned, + ) + roi_feat_fpn_list.append(roi_feat_fpn) + + roi_feat_shuffled = cat(roi_feat_fpn_list, dim=0) + roi_feat = torch.ops._caffe2.BatchPermutation(roi_feat_shuffled, rois_idx_restore_int32) + return roi_feat + + +class Caffe2FastRCNNOutputsInference: + def __init__(self, tensor_mode): + self.tensor_mode = tensor_mode # whether the output is caffe2 tensor mode + + def __call__(self, box_predictor, predictions, proposals): + """ equivalent to FastRCNNOutputLayers.inference """ + score_thresh = box_predictor.test_score_thresh + nms_thresh = box_predictor.test_nms_thresh + 
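        # score_thresh / nms_thresh read above (and topk_per_image just below) are the
        # predictor's test-time settings; they are forwarded unchanged to the caffe2
        # BoxWithNMSLimit op further down.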
topk_per_image = box_predictor.test_topk_per_image + is_rotated = len(box_predictor.box2box_transform.weights) == 5 + + if is_rotated: + box_dim = 5 + assert box_predictor.box2box_transform.weights[4] == 1, ( + "The weights for Rotated BBoxTransform in C2 have only 4 dimensions," + + " thus enforcing the angle weight to be 1 for now" + ) + box2box_transform_weights = box_predictor.box2box_transform.weights[:4] + else: + box_dim = 4 + box2box_transform_weights = box_predictor.box2box_transform.weights + + class_logits, box_regression = predictions + class_prob = F.softmax(class_logits, -1) + + assert box_regression.shape[1] % box_dim == 0 + cls_agnostic_bbox_reg = box_regression.shape[1] // box_dim == 1 + + input_tensor_mode = proposals[0].proposal_boxes.tensor.shape[1] == box_dim + 1 + + rois = type(proposals[0].proposal_boxes).cat([p.proposal_boxes for p in proposals]) + device, dtype = rois.tensor.device, rois.tensor.dtype + if input_tensor_mode: + im_info = proposals[0].image_size + rois = rois.tensor + else: + im_info = torch.Tensor( + [[sz[0], sz[1], 1.0] for sz in [x.image_size for x in proposals]] + ) + batch_ids = cat( + [ + torch.full((b, 1), i, dtype=dtype, device=device) + for i, b in enumerate(len(p) for p in proposals) + ], + dim=0, + ) + rois = torch.cat([batch_ids, rois.tensor], dim=1) + + roi_pred_bbox, roi_batch_splits = torch.ops._caffe2.BBoxTransform( + to_device(rois, "cpu"), + to_device(box_regression, "cpu"), + to_device(im_info, "cpu"), + weights=box2box_transform_weights, + apply_scale=True, + rotated=is_rotated, + angle_bound_on=True, + angle_bound_lo=-180, + angle_bound_hi=180, + clip_angle_thresh=1.0, + legacy_plus_one=False, + ) + roi_pred_bbox = to_device(roi_pred_bbox, device) + roi_batch_splits = to_device(roi_batch_splits, device) + + nms_outputs = torch.ops._caffe2.BoxWithNMSLimit( + to_device(class_prob, "cpu"), + to_device(roi_pred_bbox, "cpu"), + to_device(roi_batch_splits, "cpu"), + score_thresh=float(score_thresh), + nms=float(nms_thresh), + detections_per_im=int(topk_per_image), + soft_nms_enabled=False, + soft_nms_method="linear", + soft_nms_sigma=0.5, + soft_nms_min_score_thres=0.001, + rotated=is_rotated, + cls_agnostic_bbox_reg=cls_agnostic_bbox_reg, + input_boxes_include_bg_cls=False, + output_classes_include_bg_cls=False, + legacy_plus_one=False, + ) + roi_score_nms = to_device(nms_outputs[0], device) + roi_bbox_nms = to_device(nms_outputs[1], device) + roi_class_nms = to_device(nms_outputs[2], device) + roi_batch_splits_nms = to_device(nms_outputs[3], device) + roi_keeps_nms = to_device(nms_outputs[4], device) + roi_keeps_size_nms = to_device(nms_outputs[5], device) + if not self.tensor_mode: + roi_class_nms = roi_class_nms.to(torch.int64) + + roi_batch_ids = cat( + [ + torch.full((b, 1), i, dtype=dtype, device=device) + for i, b in enumerate(int(x.item()) for x in roi_batch_splits_nms) + ], + dim=0, + ) + + roi_class_nms = alias(roi_class_nms, "class_nms") + roi_score_nms = alias(roi_score_nms, "score_nms") + roi_bbox_nms = alias(roi_bbox_nms, "bbox_nms") + roi_batch_splits_nms = alias(roi_batch_splits_nms, "batch_splits_nms") + roi_keeps_nms = alias(roi_keeps_nms, "keeps_nms") + roi_keeps_size_nms = alias(roi_keeps_size_nms, "keeps_size_nms") + + results = InstancesList( + im_info=im_info, + indices=roi_batch_ids[:, 0], + extra_fields={ + "pred_boxes": Caffe2Boxes(roi_bbox_nms), + "scores": roi_score_nms, + "pred_classes": roi_class_nms, + }, + ) + + if not self.tensor_mode: + results = InstancesList.to_d2_instances_list(results) + 
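            # roi_batch_splits_nms holds how many detections were kept for each image;
            # splitting roi_keeps_nms by those counts recovers, per image, the indices
            # of the input proposals that survived NMS.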
batch_splits = roi_batch_splits_nms.int().tolist() + kept_indices = list(roi_keeps_nms.to(torch.int64).split(batch_splits)) + else: + results = [results] + kept_indices = [roi_keeps_nms] + + return results, kept_indices + + +class Caffe2MaskRCNNInference: + def __call__(self, pred_mask_logits, pred_instances): + """ equivalent to mask_head.mask_rcnn_inference """ + if all(isinstance(x, InstancesList) for x in pred_instances): + assert len(pred_instances) == 1 + mask_probs_pred = pred_mask_logits.sigmoid() + mask_probs_pred = alias(mask_probs_pred, "mask_fcn_probs") + pred_instances[0].pred_masks = mask_probs_pred + else: + mask_rcnn_inference(pred_mask_logits, pred_instances) + + +class Caffe2KeypointRCNNInference: + def __init__(self, use_heatmap_max_keypoint): + self.use_heatmap_max_keypoint = use_heatmap_max_keypoint + + def __call__(self, pred_keypoint_logits, pred_instances): + # just return the keypoint heatmap for now, + # there will be option to call HeatmapMaxKeypointOp + output = alias(pred_keypoint_logits, "kps_score") + if all(isinstance(x, InstancesList) for x in pred_instances): + assert len(pred_instances) == 1 + if self.use_heatmap_max_keypoint: + device = output.device + output = torch.ops._caffe2.HeatmapMaxKeypoint( + to_device(output, "cpu"), + pred_instances[0].pred_boxes.tensor, + should_output_softmax=True, # worth make it configerable? + ) + output = to_device(output, device) + output = alias(output, "keypoints_out") + pred_instances[0].pred_keypoints = output + return pred_keypoint_logits diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_export.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_export.py new file mode 100644 index 0000000000000000000000000000000000000000..ccac809d7bf49ab144b5f0a34f57e00c3534ad60 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_export.py @@ -0,0 +1,204 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import copy +import io +import logging +import numpy as np +from typing import List +import onnx +import torch +from caffe2.proto import caffe2_pb2 +from caffe2.python import core +from caffe2.python.onnx.backend import Caffe2Backend +from tabulate import tabulate +from termcolor import colored +from torch.onnx import OperatorExportTypes + +from .shared import ( + ScopedWS, + construct_init_net_from_params, + fuse_alias_placeholder, + fuse_copy_between_cpu_and_gpu, + get_params_from_init_net, + group_norm_replace_aten_with_caffe2, + infer_device_type, + remove_dead_end_ops, + remove_reshape_for_fc, + save_graph, +) + +logger = logging.getLogger(__name__) + + +def export_onnx_model(model, inputs): + """ + Trace and export a model to onnx format. 
+ + Args: + model (nn.Module): + inputs (tuple[args]): the model will be called by `model(*inputs)` + + Returns: + an onnx model + """ + assert isinstance(model, torch.nn.Module) + + # make sure all modules are in eval mode, onnx may change the training state + # of the module if the states are not consistent + def _check_eval(module): + assert not module.training + + model.apply(_check_eval) + + # Export the model to ONNX + with torch.no_grad(): + with io.BytesIO() as f: + torch.onnx.export( + model, + inputs, + f, + operator_export_type=OperatorExportTypes.ONNX_ATEN_FALLBACK, + # verbose=True, # NOTE: uncomment this for debugging + # export_params=True, + ) + onnx_model = onnx.load_from_string(f.getvalue()) + + # Apply ONNX's Optimization + all_passes = onnx.optimizer.get_available_passes() + passes = ["fuse_bn_into_conv"] + assert all(p in all_passes for p in passes) + onnx_model = onnx.optimizer.optimize(onnx_model, passes) + return onnx_model + + +def _op_stats(net_def): + type_count = {} + for t in [op.type for op in net_def.op]: + type_count[t] = type_count.get(t, 0) + 1 + type_count_list = sorted(type_count.items(), key=lambda kv: kv[0]) # alphabet + type_count_list = sorted(type_count_list, key=lambda kv: -kv[1]) # count + return "\n".join("{:>4}x {}".format(count, name) for name, count in type_count_list) + + +def _assign_device_option( + predict_net: caffe2_pb2.NetDef, init_net: caffe2_pb2.NetDef, tensor_inputs: List[torch.Tensor] +): + """ + ONNX exported network doesn't have concept of device, assign necessary + device option for each op in order to make it runable on GPU runtime. + """ + + def _get_device_type(torch_tensor): + assert torch_tensor.device.type in ["cpu", "cuda"] + assert torch_tensor.device.index == 0 + return torch_tensor.device.type + + def _assign_op_device_option(net_proto, net_ssa, blob_device_types): + for op, ssa_i in zip(net_proto.op, net_ssa): + if op.type in ["CopyCPUToGPU", "CopyGPUToCPU"]: + op.device_option.CopyFrom(core.DeviceOption(caffe2_pb2.CUDA, 0)) + else: + devices = [blob_device_types[b] for b in ssa_i[0] + ssa_i[1]] + assert all(d == devices[0] for d in devices) + if devices[0] == "cuda": + op.device_option.CopyFrom(core.DeviceOption(caffe2_pb2.CUDA, 0)) + + # update ops in predict_net + predict_net_input_device_types = { + (name, 0): _get_device_type(tensor) + for name, tensor in zip(predict_net.external_input, tensor_inputs) + } + predict_net_device_types = infer_device_type( + predict_net, known_status=predict_net_input_device_types, device_name_style="pytorch" + ) + predict_net_ssa, _ = core.get_ssa(predict_net) + _assign_op_device_option(predict_net, predict_net_ssa, predict_net_device_types) + + # update ops in init_net + init_net_ssa, versions = core.get_ssa(init_net) + init_net_output_device_types = { + (name, versions[name]): predict_net_device_types[(name, 0)] + for name in init_net.external_output + } + init_net_device_types = infer_device_type( + init_net, known_status=init_net_output_device_types, device_name_style="pytorch" + ) + _assign_op_device_option(init_net, init_net_ssa, init_net_device_types) + + +def export_caffe2_detection_model(model: torch.nn.Module, tensor_inputs: List[torch.Tensor]): + """ + Export a caffe2-compatible Detectron2 model to caffe2 format via ONNX. + + Arg: + model: a caffe2-compatible version of detectron2 model, defined in caffe2_modeling.py + tensor_inputs: a list of tensors that caffe2 model takes as input. 
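A minimal usage sketch, assuming `c2_compatible_model` wraps one of the supported meta architectures (see caffe2_modeling.py) and `tensor_inputs` were produced by its get_caffe2_inputs():

    predict_net, init_net = export_caffe2_detection_model(c2_compatible_model, tensor_inputs)
    with open("model.pb", "wb") as f:                      # same file layout Caffe2Model.save_protobuf uses
        f.write(predict_net.SerializeToString())
    with open("model_init.pb", "wb") as f:
        f.write(init_net.SerializeToString())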
+ """ + model = copy.deepcopy(model) + assert isinstance(model, torch.nn.Module) + assert hasattr(model, "encode_additional_info") + + # Export via ONNX + logger.info("Exporting a {} model via ONNX ...".format(type(model).__name__)) + onnx_model = export_onnx_model(model, (tensor_inputs,)) + # Convert ONNX model to Caffe2 protobuf + init_net, predict_net = Caffe2Backend.onnx_graph_to_caffe2_net(onnx_model) + ops_table = [[op.type, op.input, op.output] for op in predict_net.op] + table = tabulate(ops_table, headers=["type", "input", "output"], tablefmt="pipe") + logger.info( + "ONNX export Done. Exported predict_net (before optimizations):\n" + colored(table, "cyan") + ) + + # Apply protobuf optimization + fuse_alias_placeholder(predict_net, init_net) + if any(t.device.type != "cpu" for t in tensor_inputs): + fuse_copy_between_cpu_and_gpu(predict_net) + remove_dead_end_ops(init_net) + _assign_device_option(predict_net, init_net, tensor_inputs) + params, device_options = get_params_from_init_net(init_net) + predict_net, params = remove_reshape_for_fc(predict_net, params) + init_net = construct_init_net_from_params(params, device_options) + group_norm_replace_aten_with_caffe2(predict_net) + + # Record necessary information for running the pb model in Detectron2 system. + model.encode_additional_info(predict_net, init_net) + + logger.info("Operators used in predict_net: \n{}".format(_op_stats(predict_net))) + logger.info("Operators used in init_net: \n{}".format(_op_stats(init_net))) + + return predict_net, init_net + + +def run_and_save_graph(predict_net, init_net, tensor_inputs, graph_save_path): + """ + Run the caffe2 model on given inputs, recording the shape and draw the graph. + + predict_net/init_net: caffe2 model. + tensor_inputs: a list of tensors that caffe2 model takes as input. + graph_save_path: path for saving graph of exported model. + """ + + logger.info("Saving graph of ONNX exported model to {} ...".format(graph_save_path)) + save_graph(predict_net, graph_save_path, op_only=False) + + # Run the exported Caffe2 net + logger.info("Running ONNX exported model ...") + with ScopedWS("__ws_tmp__", True) as ws: + ws.RunNetOnce(init_net) + initialized_blobs = set(ws.Blobs()) + uninitialized = [inp for inp in predict_net.external_input if inp not in initialized_blobs] + for name, blob in zip(uninitialized, tensor_inputs): + ws.FeedBlob(name, blob) + + try: + ws.RunNetOnce(predict_net) + except RuntimeError as e: + logger.warning("Encountered RuntimeError: \n{}".format(str(e))) + + ws_blobs = {b: ws.FetchBlob(b) for b in ws.Blobs()} + blob_sizes = {b: ws_blobs[b].shape for b in ws_blobs if isinstance(ws_blobs[b], np.ndarray)} + + logger.info("Saving graph with blob shapes to {} ...".format(graph_save_path)) + save_graph(predict_net, graph_save_path, op_only=False, blob_sizes=blob_sizes) + + return ws_blobs diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_inference.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_inference.py new file mode 100644 index 0000000000000000000000000000000000000000..92718d04031b4513c2324ad596eae9cdbfa7c75e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_inference.py @@ -0,0 +1,136 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import collections +import logging +import numpy as np +import torch +from caffe2.proto import caffe2_pb2 +from caffe2.python import core + +from .caffe2_modeling import META_ARCH_CAFFE2_EXPORT_TYPE_MAP, convert_batched_inputs_to_c2_format +from .shared import ScopedWS, get_pb_arg_vali, get_pb_arg_vals, infer_device_type + +logger = logging.getLogger(__name__) + + +class ProtobufModel(torch.nn.Module): + """ + A class works just like nn.Module in terms of inference, but running + caffe2 model under the hood. Input/Output are Dict[str, tensor] whose keys + are in external_input/output. + """ + + def __init__(self, predict_net, init_net): + logger.info("Initializing ProtobufModel ...") + super().__init__() + assert isinstance(predict_net, caffe2_pb2.NetDef) + assert isinstance(init_net, caffe2_pb2.NetDef) + self.ws_name = "__ws_tmp__" + self.net = core.Net(predict_net) + + with ScopedWS(self.ws_name, is_reset=True, is_cleanup=False) as ws: + ws.RunNetOnce(init_net) + for blob in self.net.Proto().external_input: + if blob not in ws.Blobs(): + ws.CreateBlob(blob) + ws.CreateNet(self.net) + + self._error_msgs = set() + + def forward(self, inputs_dict): + assert all(inp in self.net.Proto().external_input for inp in inputs_dict) + with ScopedWS(self.ws_name, is_reset=False, is_cleanup=False) as ws: + for b, tensor in inputs_dict.items(): + ws.FeedBlob(b, tensor) + try: + ws.RunNet(self.net.Proto().name) + except RuntimeError as e: + if not str(e) in self._error_msgs: + self._error_msgs.add(str(e)) + logger.warning("Encountered new RuntimeError: \n{}".format(str(e))) + logger.warning("Catch the error and use partial results.") + + outputs_dict = collections.OrderedDict( + [(b, ws.FetchBlob(b)) for b in self.net.Proto().external_output] + ) + # Remove outputs of current run, this is necessary in order to + # prevent fetching the result from previous run if the model fails + # in the middle. + for b in self.net.Proto().external_output: + # Needs to create uninitialized blob to make the net runable. + # This is "equivalent" to: ws.RemoveBlob(b) then ws.CreateBlob(b), + # but there'no such API. + ws.FeedBlob(b, "{}, a C++ native class of type nullptr (uninitialized).".format(b)) + + return outputs_dict + + +class ProtobufDetectionModel(torch.nn.Module): + """ + A class works just like a pytorch meta arch in terms of inference, but running + caffe2 model under the hood. + """ + + def __init__(self, predict_net, init_net, *, convert_outputs=None): + """ + Args: + predict_net, init_net (core.Net): caffe2 nets + convert_outptus (callable): a function that converts caffe2 + outputs to the same format of the original pytorch model. + By default, use the one defined in the caffe2 meta_arch. 
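# A minimal usage sketch for ProtobufDetectionModel, assuming a pair of previously
# exported protobuf files; the paths "model.pb" / "model_init.pb" and the blank
# 480x640 test image are hypothetical placeholders, not files from this repo.
import torch
from caffe2.proto import caffe2_pb2

def _load_net(path):
    net = caffe2_pb2.NetDef()
    with open(path, "rb") as f:
        net.ParseFromString(f.read())
    return net

predict_net = _load_net("model.pb")        # hypothetical exported predict net
init_net = _load_net("model_init.pb")      # hypothetical exported init net
model = ProtobufDetectionModel(predict_net, init_net)
# Standard detectron2-style batched inputs: one dict per image with a CHW tensor.
outputs = model([{"image": torch.zeros(3, 480, 640)}])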
+ """ + super().__init__() + self.protobuf_model = ProtobufModel(predict_net, init_net) + self.size_divisibility = get_pb_arg_vali(predict_net, "size_divisibility", 0) + self.device = get_pb_arg_vals(predict_net, "device", b"cpu").decode("ascii") + + if convert_outputs is None: + meta_arch = get_pb_arg_vals(predict_net, "meta_architecture", b"GeneralizedRCNN") + meta_arch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[meta_arch.decode("ascii")] + self._convert_outputs = meta_arch.get_outputs_converter(predict_net, init_net) + else: + self._convert_outputs = convert_outputs + + def _infer_output_devices(self, inputs_dict): + def _get_device_type(torch_tensor): + assert torch_tensor.device.type in ["cpu", "cuda"] + assert torch_tensor.device.index == 0 + return torch_tensor.device.type + + predict_net = self.protobuf_model.net.Proto() + input_device_types = { + (name, 0): _get_device_type(tensor) for name, tensor in inputs_dict.items() + } + device_type_map = infer_device_type( + predict_net, known_status=input_device_types, device_name_style="pytorch" + ) + ssa, versions = core.get_ssa(predict_net) + versioned_outputs = [(name, versions[name]) for name in predict_net.external_output] + output_devices = [device_type_map[outp] for outp in versioned_outputs] + return output_devices + + def _convert_inputs(self, batched_inputs): + # currently all models convert inputs in the same way + data, im_info = convert_batched_inputs_to_c2_format( + batched_inputs, self.size_divisibility, self.device + ) + return {"data": data, "im_info": im_info} + + def forward(self, batched_inputs): + c2_inputs = self._convert_inputs(batched_inputs) + c2_results = self.protobuf_model(c2_inputs) + + if any(t.device.type != "cpu" for _, t in c2_inputs.items()): + output_devices = self._infer_output_devices(c2_inputs) + else: + output_devices = ["cpu" for _ in self.protobuf_model.net.Proto().external_output] + + def _cast_caffe2_blob_to_torch_tensor(blob, device): + return torch.Tensor(blob).to(device) if isinstance(blob, np.ndarray) else None + + c2_results = { + name: _cast_caffe2_blob_to_torch_tensor(c2_results[name], device) + for name, device in zip(self.protobuf_model.net.Proto().external_output, output_devices) + } + + return self._convert_outputs(batched_inputs, c2_inputs, c2_results) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_modeling.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_modeling.py new file mode 100644 index 0000000000000000000000000000000000000000..1732b322c75abc3ac178d61d31cdec4cdcd61dfd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/caffe2_modeling.py @@ -0,0 +1,493 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import functools +import io +import struct +import types +import torch + +from detectron2.modeling import meta_arch +from detectron2.modeling.box_regression import Box2BoxTransform +from detectron2.modeling.meta_arch.panoptic_fpn import combine_semantic_and_instance_outputs +from detectron2.modeling.postprocessing import detector_postprocess, sem_seg_postprocess +from detectron2.modeling.roi_heads import keypoint_head +from detectron2.structures import Boxes, ImageList, Instances, RotatedBoxes + +from .c10 import Caffe2Compatible +from .patcher import ROIHeadsPatcher, patch_generalized_rcnn +from .shared import ( + alias, + check_set_pb_arg, + get_pb_arg_floats, + get_pb_arg_valf, + get_pb_arg_vali, + get_pb_arg_vals, + mock_torch_nn_functional_interpolate, +) + + +def assemble_rcnn_outputs_by_name(image_sizes, tensor_outputs, force_mask_on=False): + """ + A function to assemble caffe2 model's outputs (i.e. Dict[str, Tensor]) + to detectron2's format (i.e. list of Instances instance). + This only works when the model follows the Caffe2 detectron's naming convention. + + Args: + image_sizes (List[List[int, int]]): [H, W] of every image. + tensor_outputs (Dict[str, Tensor]): external_output to its tensor. + + force_mask_on (Bool): if true, the it make sure there'll be pred_masks even + if the mask is not found from tensor_outputs (usually due to model crash) + """ + + results = [Instances(image_size) for image_size in image_sizes] + + batch_splits = tensor_outputs.get("batch_splits", None) + if batch_splits: + raise NotImplementedError() + assert len(image_sizes) == 1 + result = results[0] + + bbox_nms = tensor_outputs["bbox_nms"] + score_nms = tensor_outputs["score_nms"] + class_nms = tensor_outputs["class_nms"] + # Detection will always success because Conv support 0-batch + assert bbox_nms is not None + assert score_nms is not None + assert class_nms is not None + if bbox_nms.shape[1] == 5: + result.pred_boxes = RotatedBoxes(bbox_nms) + else: + result.pred_boxes = Boxes(bbox_nms) + result.scores = score_nms + result.pred_classes = class_nms.to(torch.int64) + + mask_fcn_probs = tensor_outputs.get("mask_fcn_probs", None) + if mask_fcn_probs is not None: + # finish the mask pred + mask_probs_pred = mask_fcn_probs + num_masks = mask_probs_pred.shape[0] + class_pred = result.pred_classes + indices = torch.arange(num_masks, device=class_pred.device) + mask_probs_pred = mask_probs_pred[indices, class_pred][:, None] + result.pred_masks = mask_probs_pred + elif force_mask_on: + # NOTE: there's no way to know the height/width of mask here, it won't be + # used anyway when batch size is 0, so just set them to 0. + result.pred_masks = torch.zeros([0, 1, 0, 0], dtype=torch.uint8) + + keypoints_out = tensor_outputs.get("keypoints_out", None) + kps_score = tensor_outputs.get("kps_score", None) + if keypoints_out is not None: + # keypoints_out: [N, 4, #kypoints], where 4 is in order of (x, y, score, prob) + keypoints_tensor = keypoints_out + # NOTE: it's possible that prob is not calculated if "should_output_softmax" + # is set to False in HeatmapMaxKeypoint, so just using raw score, seems + # it doesn't affect mAP. TODO: check more carefully. 
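# Toy illustration of the per-class mask selection performed above: for every
# detection, keep only the mask channel of its predicted class. The shapes are
# arbitrary example values.
import torch

mask_probs = torch.rand(3, 5, 7, 7)          # 3 detections, 5 classes, 7x7 masks
pred_classes = torch.tensor([4, 0, 2])       # predicted class per detection
indices = torch.arange(mask_probs.shape[0])
per_class_masks = mask_probs[indices, pred_classes][:, None]
print(per_class_masks.shape)                 # torch.Size([3, 1, 7, 7])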
+ keypoint_xyp = keypoints_tensor.transpose(1, 2)[:, :, [0, 1, 2]] + result.pred_keypoints = keypoint_xyp + elif kps_score is not None: + # keypoint heatmap to sparse data structure + pred_keypoint_logits = kps_score + keypoint_head.keypoint_rcnn_inference(pred_keypoint_logits, [result]) + + return results + + +def _cast_to_f32(f64): + return struct.unpack("f", struct.pack("f", f64))[0] + + +def set_caffe2_compatible_tensor_mode(model, enable=True): + def _fn(m): + if isinstance(m, Caffe2Compatible): + m.tensor_mode = enable + + model.apply(_fn) + + +def convert_batched_inputs_to_c2_format(batched_inputs, size_divisibility, device): + """ + See get_caffe2_inputs() below. + """ + assert all(isinstance(x, dict) for x in batched_inputs) + assert all(x["image"].dim() == 3 for x in batched_inputs) + + images = [x["image"] for x in batched_inputs] + images = ImageList.from_tensors(images, size_divisibility) + + im_info = [] + for input_per_image, image_size in zip(batched_inputs, images.image_sizes): + target_height = input_per_image.get("height", image_size[0]) + target_width = input_per_image.get("width", image_size[1]) # noqa + # NOTE: The scale inside im_info is kept as convention and for providing + # post-processing information if further processing is needed. For + # current Caffe2 model definitions that don't include post-processing inside + # the model, this number is not used. + # NOTE: There can be a slight difference between width and height + # scales, using a single number can results in numerical difference + # compared with D2's post-processing. + scale = target_height / image_size[0] + im_info.append([image_size[0], image_size[1], scale]) + im_info = torch.Tensor(im_info) + + return images.tensor.to(device), im_info.to(device) + + +class Caffe2MetaArch(Caffe2Compatible, torch.nn.Module): + """ + Base class for caffe2-compatible implementation of a meta architecture. + The forward is traceable and its traced graph can be converted to caffe2 + graph through ONNX. + """ + + def __init__(self, cfg, torch_model): + """ + Args: + cfg (CfgNode): + torch_model (nn.Module): the detectron2 model (meta_arch) to be + converted. + """ + super().__init__() + self._wrapped_model = torch_model + self.eval() + set_caffe2_compatible_tensor_mode(self, True) + + def get_caffe2_inputs(self, batched_inputs): + """ + Convert pytorch-style structured inputs to caffe2-style inputs that + are tuples of tensors. + + Args: + batched_inputs (list[dict]): inputs to a detectron2 model + in its standard format. Each dict has "image" (CHW tensor), and optionally + "height" and "width". + + Returns: + tuple[Tensor]: + tuple of tensors that will be the inputs to the + :meth:`forward` method. For existing models, the first + is an NCHW tensor (padded and batched); the second is + a im_info Nx3 tensor, where the rows are + (height, width, unused legacy parameter) + """ + return convert_batched_inputs_to_c2_format( + batched_inputs, + self._wrapped_model.backbone.size_divisibility, + self._wrapped_model.device, + ) + + def encode_additional_info(self, predict_net, init_net): + """ + Save extra metadata that will be used by inference in the output protobuf. + """ + pass + + def forward(self, inputs): + """ + Run the forward in caffe2-style. It has to use caffe2-compatible ops + and the method will be used for tracing. + + Args: + inputs (tuple[Tensor]): inputs defined by :meth:`get_caffe2_input`. + They will be the inputs of the converted caffe2 graph. + + Returns: + tuple[Tensor]: output tensors. 
They will be the outputs of the + converted caffe2 graph. + """ + raise NotImplementedError + + def _caffe2_preprocess_image(self, inputs): + """ + Caffe2 implementation of preprocess_image, which is called inside each MetaArch's forward. + It normalizes the input images, and the final caffe2 graph assumes the + inputs have been batched already. + """ + data, im_info = inputs + data = alias(data, "data") + im_info = alias(im_info, "im_info") + mean, std = self._wrapped_model.pixel_mean, self._wrapped_model.pixel_std + normalized_data = (data - mean) / std + normalized_data = alias(normalized_data, "normalized_data") + + # Pack (data, im_info) into ImageList which is recognized by self.inference. + images = ImageList(tensor=normalized_data, image_sizes=im_info) + return images + + @staticmethod + def get_outputs_converter(predict_net, init_net): + """ + Creates a function that converts outputs of the caffe2 model to + detectron2's standard format. + The function uses information in `predict_net` and `init_net` that are + available at inferene time. Therefore the function logic can be used in inference. + + The returned function has the following signature: + + def convert(batched_inputs, c2_inputs, c2_results) -> detectron2_outputs + + Where + + * batched_inputs (list[dict]): the original input format of the meta arch + * c2_inputs (dict[str, Tensor]): the caffe2 inputs. + * c2_results (dict[str, Tensor]): the caffe2 output format, + corresponding to the outputs of the :meth:`forward` function. + * detectron2_outputs: the original output format of the meta arch. + + This function can be used to compare the outputs of the original meta arch and + the converted caffe2 graph. + + Returns: + callable: a callable of the above signature. + """ + raise NotImplementedError + + +class Caffe2GeneralizedRCNN(Caffe2MetaArch): + def __init__(self, cfg, torch_model): + assert isinstance(torch_model, meta_arch.GeneralizedRCNN) + torch_model = patch_generalized_rcnn(torch_model) + super().__init__(cfg, torch_model) + + self.roi_heads_patcher = ROIHeadsPatcher(cfg, self._wrapped_model.roi_heads) + + def encode_additional_info(self, predict_net, init_net): + size_divisibility = self._wrapped_model.backbone.size_divisibility + check_set_pb_arg(predict_net, "size_divisibility", "i", size_divisibility) + check_set_pb_arg( + predict_net, "device", "s", str.encode(str(self._wrapped_model.device), "ascii") + ) + check_set_pb_arg(predict_net, "meta_architecture", "s", b"GeneralizedRCNN") + + @mock_torch_nn_functional_interpolate() + def forward(self, inputs): + if not self.tensor_mode: + return self._wrapped_model.inference(inputs) + images = self._caffe2_preprocess_image(inputs) + features = self._wrapped_model.backbone(images.tensor) + proposals, _ = self._wrapped_model.proposal_generator(images, features) + with self.roi_heads_patcher.mock_roi_heads(): + detector_results, _ = self._wrapped_model.roi_heads(images, features, proposals) + return tuple(detector_results[0].flatten()) + + @staticmethod + def get_outputs_converter(predict_net, init_net): + def f(batched_inputs, c2_inputs, c2_results): + image_sizes = [[int(im[0]), int(im[1])] for im in c2_inputs["im_info"]] + results = assemble_rcnn_outputs_by_name(image_sizes, c2_results) + return meta_arch.GeneralizedRCNN._postprocess(results, batched_inputs, image_sizes) + + return f + + +class Caffe2PanopticFPN(Caffe2MetaArch): + def __init__(self, cfg, torch_model): + assert isinstance(torch_model, meta_arch.PanopticFPN) + torch_model = 
patch_generalized_rcnn(torch_model) + super().__init__(cfg, torch_model) + + self.roi_heads_patcher = ROIHeadsPatcher(cfg, self._wrapped_model.roi_heads) + + @mock_torch_nn_functional_interpolate() + def forward(self, inputs): + assert self.tensor_mode + images = self._caffe2_preprocess_image(inputs) + features = self._wrapped_model.backbone(images.tensor) + + sem_seg_results, _ = self._wrapped_model.sem_seg_head(features) + sem_seg_results = alias(sem_seg_results, "sem_seg") + + proposals, _ = self._wrapped_model.proposal_generator(images, features) + + with self.roi_heads_patcher.mock_roi_heads(self.tensor_mode): + detector_results, _ = self._wrapped_model.roi_heads(images, features, proposals) + + return tuple(detector_results[0].flatten()) + (sem_seg_results,) + + def encode_additional_info(self, predict_net, init_net): + size_divisibility = self._wrapped_model.backbone.size_divisibility + check_set_pb_arg(predict_net, "size_divisibility", "i", size_divisibility) + check_set_pb_arg( + predict_net, "device", "s", str.encode(str(self._wrapped_model.device), "ascii") + ) + check_set_pb_arg(predict_net, "meta_architecture", "s", b"PanopticFPN") + + # Inference parameters: + check_set_pb_arg(predict_net, "combine_on", "i", self._wrapped_model.combine_on) + check_set_pb_arg( + predict_net, + "combine_overlap_threshold", + "f", + _cast_to_f32(self._wrapped_model.combine_overlap_threshold), + ) + check_set_pb_arg( + predict_net, + "combine_stuff_area_limit", + "i", + self._wrapped_model.combine_stuff_area_limit, + ) + check_set_pb_arg( + predict_net, + "combine_instances_confidence_threshold", + "f", + _cast_to_f32(self._wrapped_model.combine_instances_confidence_threshold), + ) + + @staticmethod + def get_outputs_converter(predict_net, init_net): + combine_on = get_pb_arg_vali(predict_net, "combine_on", None) + combine_overlap_threshold = get_pb_arg_valf(predict_net, "combine_overlap_threshold", None) + combine_stuff_area_limit = get_pb_arg_vali(predict_net, "combine_stuff_area_limit", None) + combine_instances_confidence_threshold = get_pb_arg_valf( + predict_net, "combine_instances_confidence_threshold", None + ) + + def f(batched_inputs, c2_inputs, c2_results): + image_sizes = [[int(im[0]), int(im[1])] for im in c2_inputs["im_info"]] + detector_results = assemble_rcnn_outputs_by_name( + image_sizes, c2_results, force_mask_on=True + ) + sem_seg_results = c2_results["sem_seg"] + + # copied from meta_arch/panoptic_fpn.py ... 
+ processed_results = [] + for sem_seg_result, detector_result, input_per_image, image_size in zip( + sem_seg_results, detector_results, batched_inputs, image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + sem_seg_r = sem_seg_postprocess(sem_seg_result, image_size, height, width) + detector_r = detector_postprocess(detector_result, height, width) + + processed_results.append({"sem_seg": sem_seg_r, "instances": detector_r}) + + if combine_on: + panoptic_r = combine_semantic_and_instance_outputs( + detector_r, + sem_seg_r.argmax(dim=0), + combine_overlap_threshold, + combine_stuff_area_limit, + combine_instances_confidence_threshold, + ) + processed_results[-1]["panoptic_seg"] = panoptic_r + return processed_results + + return f + + +class Caffe2RetinaNet(Caffe2MetaArch): + def __init__(self, cfg, torch_model): + assert isinstance(torch_model, meta_arch.RetinaNet) + super().__init__(cfg, torch_model) + + @mock_torch_nn_functional_interpolate() + def forward(self, inputs): + assert self.tensor_mode + images = self._caffe2_preprocess_image(inputs) + + # explicitly return the images sizes to avoid removing "im_info" by ONNX + # since it's not used in the forward path + return_tensors = [images.image_sizes] + + features = self._wrapped_model.backbone(images.tensor) + features = [features[f] for f in self._wrapped_model.in_features] + for i, feature_i in enumerate(features): + features[i] = alias(feature_i, "feature_{}".format(i), is_backward=True) + return_tensors.append(features[i]) + + box_cls, box_delta = self._wrapped_model.head(features) + for i, (box_cls_i, box_delta_i) in enumerate(zip(box_cls, box_delta)): + return_tensors.append(alias(box_cls_i, "box_cls_{}".format(i))) + return_tensors.append(alias(box_delta_i, "box_delta_{}".format(i))) + + return tuple(return_tensors) + + def encode_additional_info(self, predict_net, init_net): + size_divisibility = self._wrapped_model.backbone.size_divisibility + check_set_pb_arg(predict_net, "size_divisibility", "i", size_divisibility) + check_set_pb_arg( + predict_net, "device", "s", str.encode(str(self._wrapped_model.device), "ascii") + ) + check_set_pb_arg(predict_net, "meta_architecture", "s", b"RetinaNet") + + # Inference parameters: + check_set_pb_arg( + predict_net, "score_threshold", "f", _cast_to_f32(self._wrapped_model.score_threshold) + ) + check_set_pb_arg(predict_net, "topk_candidates", "i", self._wrapped_model.topk_candidates) + check_set_pb_arg( + predict_net, "nms_threshold", "f", _cast_to_f32(self._wrapped_model.nms_threshold) + ) + check_set_pb_arg( + predict_net, + "max_detections_per_image", + "i", + self._wrapped_model.max_detections_per_image, + ) + + check_set_pb_arg( + predict_net, + "bbox_reg_weights", + "floats", + [_cast_to_f32(w) for w in self._wrapped_model.box2box_transform.weights], + ) + self._encode_anchor_generator_cfg(predict_net) + + def _encode_anchor_generator_cfg(self, predict_net): + # serialize anchor_generator for future use + serialized_anchor_generator = io.BytesIO() + torch.save(self._wrapped_model.anchor_generator, serialized_anchor_generator) + # Ideally we can put anchor generating inside the model, then we don't + # need to store this information. 
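# A small, self-contained sketch of the serialize-to-bytes round trip used here:
# torch.save writes into an in-memory buffer whose raw bytes can be stored as a
# protobuf argument and later restored with torch.load. The dict below is only a
# stand-in for the real anchor generator module.
import io
import torch

buf = io.BytesIO()
torch.save({"sizes": [32, 64, 128], "aspect_ratios": [0.5, 1.0, 2.0]}, buf)
raw_bytes = buf.getvalue()                    # what ends up inside predict_net
restored = torch.load(io.BytesIO(raw_bytes))
print(restored["sizes"])                      # [32, 64, 128]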
+ bytes = serialized_anchor_generator.getvalue() + check_set_pb_arg(predict_net, "serialized_anchor_generator", "s", bytes) + + @staticmethod + def get_outputs_converter(predict_net, init_net): + self = types.SimpleNamespace() + serialized_anchor_generator = io.BytesIO( + get_pb_arg_vals(predict_net, "serialized_anchor_generator", None) + ) + self.anchor_generator = torch.load(serialized_anchor_generator) + bbox_reg_weights = get_pb_arg_floats(predict_net, "bbox_reg_weights", None) + self.box2box_transform = Box2BoxTransform(weights=tuple(bbox_reg_weights)) + self.score_threshold = get_pb_arg_valf(predict_net, "score_threshold", None) + self.topk_candidates = get_pb_arg_vali(predict_net, "topk_candidates", None) + self.nms_threshold = get_pb_arg_valf(predict_net, "nms_threshold", None) + self.max_detections_per_image = get_pb_arg_vali( + predict_net, "max_detections_per_image", None + ) + + # hack to reuse inference code from RetinaNet + self.inference = functools.partial(meta_arch.RetinaNet.inference, self) + self.inference_single_image = functools.partial( + meta_arch.RetinaNet.inference_single_image, self + ) + + def f(batched_inputs, c2_inputs, c2_results): + image_sizes = [[int(im[0]), int(im[1])] for im in c2_inputs["im_info"]] + + num_features = len([x for x in c2_results.keys() if x.startswith("box_cls_")]) + box_cls = [c2_results["box_cls_{}".format(i)] for i in range(num_features)] + box_delta = [c2_results["box_delta_{}".format(i)] for i in range(num_features)] + + # For each feature level, feature should have the same batch size and + # spatial dimension as the box_cls and box_delta. + dummy_features = [box_delta[i].clone()[:, 0:0, :, :] for i in range(num_features)] + anchors = self.anchor_generator(dummy_features) + + # self.num_classess can be inferred + self.num_classes = box_cls[0].shape[1] // (box_delta[0].shape[1] // 4) + + results = self.inference(box_cls, box_delta, anchors, image_sizes) + return meta_arch.GeneralizedRCNN._postprocess(results, batched_inputs, image_sizes) + + return f + + +META_ARCH_CAFFE2_EXPORT_TYPE_MAP = { + "GeneralizedRCNN": Caffe2GeneralizedRCNN, + "PanopticFPN": Caffe2PanopticFPN, + "RetinaNet": Caffe2RetinaNet, +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/patcher.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/patcher.py new file mode 100644 index 0000000000000000000000000000000000000000..3f0b0fd8122d12c10d06cfc1b0720e3c3374c737 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/patcher.py @@ -0,0 +1,153 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import contextlib +import mock +import torch + +from detectron2.modeling import poolers +from detectron2.modeling.proposal_generator import rpn +from detectron2.modeling.roi_heads import keypoint_head, mask_head +from detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputLayers + +from .c10 import ( + Caffe2Compatible, + Caffe2FastRCNNOutputsInference, + Caffe2KeypointRCNNInference, + Caffe2MaskRCNNInference, + Caffe2ROIPooler, + Caffe2RPN, +) + + +class GenericMixin(object): + pass + + +class Caffe2CompatibleConverter(object): + """ + A GenericUpdater which implements the `create_from` interface, by modifying + module object and assign it with another class replaceCls. 
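# Caffe2CompatibleConverter re-classes a module at runtime, optionally building a
# mixin class on the fly. A minimal illustration of that pattern with plain Python
# classes (the class names are made up for the example):
class PatchedMixin:
    def extra(self):
        return "patched behaviour"

class OriginalModule:
    pass

obj = OriginalModule()
# Build "PatchedMixinMixedWithOriginalModule" dynamically and swap the class.
obj.__class__ = type("PatchedMixinMixedWithOriginalModule",
                     (PatchedMixin, OriginalModule), {})
print(obj.extra())   # "patched behaviour"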
+ """ + + def __init__(self, replaceCls): + self.replaceCls = replaceCls + + def create_from(self, module): + # update module's class to the new class + assert isinstance(module, torch.nn.Module) + if issubclass(self.replaceCls, GenericMixin): + # replaceCls should act as mixin, create a new class on-the-fly + new_class = type( + "{}MixedWith{}".format(self.replaceCls.__name__, module.__class__.__name__), + (self.replaceCls, module.__class__), + {}, # {"new_method": lambda self: ...}, + ) + module.__class__ = new_class + else: + # replaceCls is complete class, this allow arbitrary class swap + module.__class__ = self.replaceCls + + # initialize Caffe2Compatible + if isinstance(module, Caffe2Compatible): + module.tensor_mode = False + + return module + + +def patch(model, target, updater, *args, **kwargs): + """ + recursively (post-order) update all modules with the target type and its + subclasses, make a initialization/composition/inheritance/... via the + updater.create_from. + """ + for name, module in model.named_children(): + model._modules[name] = patch(module, target, updater, *args, **kwargs) + if isinstance(model, target): + return updater.create_from(model, *args, **kwargs) + return model + + +def patch_generalized_rcnn(model): + ccc = Caffe2CompatibleConverter + model = patch(model, rpn.RPN, ccc(Caffe2RPN)) + model = patch(model, poolers.ROIPooler, ccc(Caffe2ROIPooler)) + + return model + + +@contextlib.contextmanager +def mock_fastrcnn_outputs_inference( + tensor_mode, check=True, box_predictor_type=FastRCNNOutputLayers +): + with mock.patch.object( + box_predictor_type, + "inference", + autospec=True, + side_effect=Caffe2FastRCNNOutputsInference(tensor_mode), + ) as mocked_func: + yield + if check: + assert mocked_func.call_count > 0 + + +@contextlib.contextmanager +def mock_mask_rcnn_inference(tensor_mode, patched_module, check=True): + with mock.patch( + "{}.mask_rcnn_inference".format(patched_module), side_effect=Caffe2MaskRCNNInference() + ) as mocked_func: + yield + if check: + assert mocked_func.call_count > 0 + + +@contextlib.contextmanager +def mock_keypoint_rcnn_inference(tensor_mode, patched_module, use_heatmap_max_keypoint, check=True): + with mock.patch( + "{}.keypoint_rcnn_inference".format(patched_module), + side_effect=Caffe2KeypointRCNNInference(use_heatmap_max_keypoint), + ) as mocked_func: + yield + if check: + assert mocked_func.call_count > 0 + + +class ROIHeadsPatcher: + def __init__(self, cfg, heads): + self.heads = heads + + self.use_heatmap_max_keypoint = cfg.EXPORT_CAFFE2.USE_HEATMAP_MAX_KEYPOINT + + @contextlib.contextmanager + def mock_roi_heads(self, tensor_mode=True): + """ + Patching several inference functions inside ROIHeads and its subclasses + + Args: + tensor_mode (bool): whether the inputs/outputs are caffe2's tensor + format or not. Default to True. + """ + # NOTE: this requries the `keypoint_rcnn_inference` and `mask_rcnn_inference` + # are called inside the same file as BaseXxxHead due to using mock.patch. 
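# The helpers above all follow the same mock.patch recipe: temporarily replace a
# module-level function with a side_effect and assert it was actually invoked.
# A stand-alone illustration using the stdlib unittest.mock (equivalent to the
# external "mock" package imported at the top of this file); it assumes the
# snippet runs as a script so the module name is "__main__".
from unittest import mock

def keypoint_rcnn_inference_stub(logits):
    return "original"

with mock.patch("__main__.keypoint_rcnn_inference_stub",
                side_effect=lambda logits: "patched") as mocked_func:
    print(keypoint_rcnn_inference_stub(None))   # "patched"
assert mocked_func.call_count > 0                # same check as check=True above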
+ kpt_heads_mod = keypoint_head.BaseKeypointRCNNHead.__module__ + mask_head_mod = mask_head.BaseMaskRCNNHead.__module__ + + mock_ctx_managers = [ + mock_fastrcnn_outputs_inference( + tensor_mode=tensor_mode, + check=True, + box_predictor_type=type(self.heads.box_predictor), + ) + ] + if getattr(self.heads, "keypoint_on", False): + mock_ctx_managers += [ + mock_keypoint_rcnn_inference( + tensor_mode, kpt_heads_mod, self.use_heatmap_max_keypoint + ) + ] + if getattr(self.heads, "mask_on", False): + mock_ctx_managers += [mock_mask_rcnn_inference(tensor_mode, mask_head_mod)] + + with contextlib.ExitStack() as stack: # python 3.3+ + for mgr in mock_ctx_managers: + stack.enter_context(mgr) + yield diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/shared.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/shared.py new file mode 100644 index 0000000000000000000000000000000000000000..cb7ffeb098f21178660572830164126fab63e0e1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/export/shared.py @@ -0,0 +1,1034 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import collections +import contextlib +import copy +import functools +import logging +import mock +import numpy as np +import os +from typing import Any, Callable, Dict, List, Optional, Tuple, Union +import caffe2.python.utils as putils +import torch +import torch.nn.functional as F +from caffe2.proto import caffe2_pb2 +from caffe2.python import core, net_drawer, workspace +from torch.nn.functional import interpolate as interp + +logger = logging.getLogger(__name__) + + +# ==== torch/utils_toffee/cast.py ======================================= + + +def to_device(t, device_str): + """ + This function is a replacement of .to(another_device) such that it allows the + casting to be traced properly by explicitly calling the underlying copy ops. + It also avoids introducing unncessary op when casting to the same device. + """ + src = t.device + dst = torch.device(device_str) + + if src == dst: + return t + elif src.type == "cuda" and dst.type == "cpu": + return torch.ops._caffe2.CopyGPUToCPU(t) + elif src.type == "cpu" and dst.type == "cuda": + return torch.ops._caffe2.CopyCPUToGPU(t) + else: + raise RuntimeError("Can't cast tensor from device {} to device {}".format(src, dst)) + + +# ==== torch/utils_toffee/interpolate.py ======================================= + + +# Note: borrowed from vision/detection/fair/detectron/detectron/modeling/detector.py +def BilinearInterpolation(tensor_in, up_scale): + assert up_scale % 2 == 0, "Scale should be even" + + def upsample_filt(size): + factor = (size + 1) // 2 + if size % 2 == 1: + center = factor - 1 + else: + center = factor - 0.5 + + og = np.ogrid[:size, :size] + return (1 - abs(og[0] - center) / factor) * (1 - abs(og[1] - center) / factor) + + kernel_size = int(up_scale) * 2 + bil_filt = upsample_filt(kernel_size) + + dim = int(tensor_in.shape[1]) + kernel = np.zeros((dim, dim, kernel_size, kernel_size), dtype=np.float32) + kernel[range(dim), range(dim), :, :] = bil_filt + + tensor_out = F.conv_transpose2d( + tensor_in, + weight=to_device(torch.Tensor(kernel), tensor_in.device), + bias=None, + stride=int(up_scale), + padding=int(up_scale / 2), + ) + + return tensor_out + + +# NOTE: ONNX is incompatible with traced torch.nn.functional.interpolate if +# using dynamic `scale_factor` rather than static `size`. 
(T43166860) +# NOTE: Caffe2 Int8 conversion might not be able to quantize `size` properly. +def onnx_compatibale_interpolate( + input, size=None, scale_factor=None, mode="nearest", align_corners=None +): + # NOTE: The input dimensions are interpreted in the form: + # `mini-batch x channels x [optional depth] x [optional height] x width`. + if size is None and scale_factor is not None: + if input.dim() == 4: + if isinstance(scale_factor, (int, float)): + height_scale, width_scale = (scale_factor, scale_factor) + else: + assert isinstance(scale_factor, (tuple, list)) + assert len(scale_factor) == 2 + height_scale, width_scale = scale_factor + + assert not align_corners, "No matching C2 op for align_corners == True" + if mode == "nearest": + return torch.ops._caffe2.ResizeNearest( + input, order="NCHW", width_scale=width_scale, height_scale=height_scale + ) + elif mode == "bilinear": + logger.warning( + "Use F.conv_transpose2d for bilinear interpolate" + " because there's no such C2 op, this may cause significant" + " slowdown and the boundary pixels won't be as same as" + " using F.interpolate due to padding." + ) + assert height_scale == width_scale + return BilinearInterpolation(input, up_scale=height_scale) + logger.warning("Output size is not static, it might cause ONNX conversion issue") + + return interp(input, size, scale_factor, mode, align_corners) + + +@contextlib.contextmanager +def mock_torch_nn_functional_interpolate(): + if torch.onnx.is_in_onnx_export(): + with mock.patch( + "torch.nn.functional.interpolate", side_effect=onnx_compatibale_interpolate + ): + yield + else: + yield + + +# ==== torch/utils_caffe2/ws_utils.py ========================================== + + +class ScopedWS(object): + def __init__(self, ws_name, is_reset, is_cleanup=False): + self.ws_name = ws_name + self.is_reset = is_reset + self.is_cleanup = is_cleanup + self.org_ws = "" + + def __enter__(self): + self.org_ws = workspace.CurrentWorkspace() + if self.ws_name is not None: + workspace.SwitchWorkspace(self.ws_name, True) + if self.is_reset: + workspace.ResetWorkspace() + + return workspace + + def __exit__(self, *args): + if self.is_cleanup: + workspace.ResetWorkspace() + if self.ws_name is not None: + workspace.SwitchWorkspace(self.org_ws) + + +def fetch_any_blob(name): + bb = None + try: + bb = workspace.FetchBlob(name) + except TypeError: + bb = workspace.FetchInt8Blob(name) + except Exception as e: + logger.error("Get blob {} error: {}".format(name, e)) + + return bb + + +# ==== torch/utils_caffe2/protobuf.py ========================================== + + +def get_pb_arg(pb, arg_name): + for x in pb.arg: + if x.name == arg_name: + return x + return None + + +def get_pb_arg_valf(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return arg.f if arg is not None else default_val + + +def get_pb_arg_floats(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return list(map(float, arg.floats)) if arg is not None else default_val + + +def get_pb_arg_ints(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return list(map(int, arg.ints)) if arg is not None else default_val + + +def get_pb_arg_vali(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return arg.i if arg is not None else default_val + + +def get_pb_arg_vals(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return arg.s if arg is not None else default_val + + +def get_pb_arg_valstrings(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return list(arg.strings) if arg is not 
None else default_val + + +def check_set_pb_arg(pb, arg_name, arg_attr, arg_value, allow_override=False): + arg = get_pb_arg(pb, arg_name) + if arg is None: + arg = putils.MakeArgument(arg_name, arg_value) + assert hasattr(arg, arg_attr) + pb.arg.extend([arg]) + if allow_override and getattr(arg, arg_attr) != arg_value: + logger.warning( + "Override argument {}: {} -> {}".format(arg_name, getattr(arg, arg_attr), arg_value) + ) + setattr(arg, arg_attr, arg_value) + else: + assert arg is not None + assert getattr(arg, arg_attr) == arg_value, "Existing value {}, new value {}".format( + getattr(arg, arg_attr), arg_value + ) + + +def _create_const_fill_op_from_numpy(name, tensor, device_option=None): + assert type(tensor) == np.ndarray + kTypeNameMapper = { + np.dtype("float32"): "GivenTensorFill", + np.dtype("int32"): "GivenTensorIntFill", + np.dtype("int64"): "GivenTensorInt64Fill", + np.dtype("uint8"): "GivenTensorStringFill", + } + + args_dict = {} + if tensor.dtype == np.dtype("uint8"): + args_dict.update({"values": [str(tensor.data)], "shape": [1]}) + else: + args_dict.update({"values": tensor, "shape": tensor.shape}) + + if device_option is not None: + args_dict["device_option"] = device_option + + return core.CreateOperator(kTypeNameMapper[tensor.dtype], [], [name], **args_dict) + + +def _create_const_fill_op_from_c2_int8_tensor(name, int8_tensor): + assert type(int8_tensor) == workspace.Int8Tensor + kTypeNameMapper = { + np.dtype("int32"): "Int8GivenIntTensorFill", + np.dtype("uint8"): "Int8GivenTensorFill", + } + + tensor = int8_tensor.data + assert tensor.dtype in [np.dtype("uint8"), np.dtype("int32")] + values = tensor.tobytes() if tensor.dtype == np.dtype("uint8") else tensor + + return core.CreateOperator( + kTypeNameMapper[tensor.dtype], + [], + [name], + values=values, + shape=tensor.shape, + Y_scale=int8_tensor.scale, + Y_zero_point=int8_tensor.zero_point, + ) + + +def create_const_fill_op( + name: str, + blob: Union[np.ndarray, workspace.Int8Tensor], + device_option: Optional[caffe2_pb2.DeviceOption] = None, +) -> caffe2_pb2.OperatorDef: + """ + Given a blob object, return the Caffe2 operator that creates this blob + as constant. Currently support NumPy tensor and Caffe2 Int8Tensor. + """ + + tensor_type = type(blob) + assert tensor_type in [ + np.ndarray, + workspace.Int8Tensor, + ], 'Error when creating const fill op for "{}", unsupported blob type: {}'.format( + name, type(blob) + ) + + if tensor_type == np.ndarray: + return _create_const_fill_op_from_numpy(name, blob, device_option) + elif tensor_type == workspace.Int8Tensor: + assert device_option is None + return _create_const_fill_op_from_c2_int8_tensor(name, blob) + + +def construct_init_net_from_params( + params: Dict[str, Any], device_options: Optional[Dict[str, caffe2_pb2.DeviceOption]] = None +) -> caffe2_pb2.NetDef: + """ + Construct the init_net from params dictionary + """ + init_net = caffe2_pb2.NetDef() + device_options = device_options or {} + for name, blob in params.items(): + if isinstance(blob, str): + logger.warning( + ( + "Blob {} with type {} is not supported in generating init net," + " skipped.".format(name, type(blob)) + ) + ) + continue + init_net.op.extend( + [create_const_fill_op(name, blob, device_option=device_options.get(name, None))] + ) + init_net.external_output.append(name) + return init_net + + +def get_producer_map(ssa): + """ + Return dict from versioned blob to (i, j), + where i is index of producer op, j is the index of output of that op. 
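# The ssa structure used throughout this file is a list with one
# (versioned_inputs, versioned_outputs) pair per operator. A toy version of the
# producer map described above, with made-up blob names:
toy_ssa = [
    ([("data", 0)], [("conv1", 0)]),     # op #0: data -> conv1
    ([("conv1", 0)], [("relu1", 0)]),    # op #1: conv1 -> relu1
]
producer_map = {}
for i, (_, outputs) in enumerate(toy_ssa):
    for j, outp in enumerate(outputs):
        producer_map[outp] = (i, j)
print(producer_map[("relu1", 0)])        # (1, 0): produced by op #1, output #0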
+ """ + producer_map = {} + for i in range(len(ssa)): + outputs = ssa[i][1] + for j, outp in enumerate(outputs): + producer_map[outp] = (i, j) + return producer_map + + +def get_consumer_map(ssa): + """ + Return dict from versioned blob to list of (i, j), + where i is index of consumer op, j is the index of input of that op. + """ + consumer_map = collections.defaultdict(list) + for i in range(len(ssa)): + inputs = ssa[i][0] + for j, inp in enumerate(inputs): + consumer_map[inp].append((i, j)) + return consumer_map + + +def get_params_from_init_net( + init_net: caffe2_pb2.NetDef, +) -> [Dict[str, Any], Dict[str, caffe2_pb2.DeviceOption]]: + """ + Take the output blobs from init_net by running it. + Outputs: + params: dict from blob name to numpy array + device_options: dict from blob name to the device option of its creating op + """ + # NOTE: this assumes that the params is determined by producer op with the + # only exception be CopyGPUToCPU which is CUDA op but returns CPU tensor. + def _get_device_option(producer_op): + if producer_op.type == "CopyGPUToCPU": + return caffe2_pb2.DeviceOption() + else: + return producer_op.device_option + + with ScopedWS("__get_params_from_init_net__", is_reset=True, is_cleanup=True) as ws: + ws.RunNetOnce(init_net) + params = {b: fetch_any_blob(b) for b in init_net.external_output} + ssa, versions = core.get_ssa(init_net) + producer_map = get_producer_map(ssa) + device_options = { + b: _get_device_option(init_net.op[producer_map[(b, versions[b])][0]]) + for b in init_net.external_output + } + return params, device_options + + +def _updater_raise(op, input_types, output_types): + raise RuntimeError( + "Failed to apply updater for op {} given input_types {} and" + " output_types {}".format(op, input_types, output_types) + ) + + +def _generic_status_identifier( + predict_net: caffe2_pb2.NetDef, + status_updater: Callable, + known_status: Dict[Tuple[str, int], Any], +) -> Dict[Tuple[str, int], Any]: + """ + Statically infer the status of each blob, the status can be such as device type + (CPU/GPU), layout (NCHW/NHWC), data type (float32/int8), etc. "Blob" here + is versioned blob (Tuple[str, int]) in the format compatible with ssa. + Inputs: + predict_net: the caffe2 network + status_updater: a callable, given an op and the status of its input/output, + it returns the updated status of input/output. `None` is used for + representing unknown status. + known_status: a dict containing known status, used as initialization. 
+ Outputs: + A dict mapping from versioned blob to its status + """ + ssa, versions = core.get_ssa(predict_net) + versioned_ext_input = [(b, 0) for b in predict_net.external_input] + versioned_ext_output = [(b, versions[b]) for b in predict_net.external_output] + all_versioned_blobs = set().union(*[set(x[0] + x[1]) for x in ssa]) + + allowed_vbs = all_versioned_blobs.union(versioned_ext_input).union(versioned_ext_output) + assert all(k in allowed_vbs for k in known_status) + assert all(v is not None for v in known_status.values()) + _known_status = copy.deepcopy(known_status) + + def _check_and_update(key, value): + assert value is not None + if key in _known_status: + if not _known_status[key] == value: + raise RuntimeError( + "Confilict status for {}, existing status {}, new status {}".format( + key, _known_status[key], value + ) + ) + _known_status[key] = value + + def _update_i(op, ssa_i): + versioned_inputs = ssa_i[0] + versioned_outputs = ssa_i[1] + + inputs_status = [_known_status.get(b, None) for b in versioned_inputs] + outputs_status = [_known_status.get(b, None) for b in versioned_outputs] + + new_inputs_status, new_outputs_status = status_updater(op, inputs_status, outputs_status) + + for versioned_blob, status in zip( + versioned_inputs + versioned_outputs, new_inputs_status + new_outputs_status + ): + if status is not None: + _check_and_update(versioned_blob, status) + + for op, ssa_i in zip(predict_net.op, ssa): + _update_i(op, ssa_i) + for op, ssa_i in zip(reversed(predict_net.op), reversed(ssa)): + _update_i(op, ssa_i) + + # NOTE: This strictly checks all the blob from predict_net must be assgined + # a known status. However sometimes it's impossible (eg. having deadend op), + # we may relax this constraint if + for k in all_versioned_blobs: + if k not in _known_status: + raise NotImplementedError( + "Can not infer the status for {}. 
Currently only support the case where" + " a single forward and backward pass can identify status for all blobs.".format(k) + ) + + return _known_status + + +def infer_device_type( + predict_net: caffe2_pb2.NetDef, + known_status: Dict[Tuple[str, int], Any], + device_name_style: str = "caffe2", +) -> Dict[Tuple[str, int], str]: + """ Return the device type ("cpu" or "gpu"/"cuda") of each (versioned) blob """ + + assert device_name_style in ["caffe2", "pytorch"] + _CPU_STR = "cpu" + _GPU_STR = "gpu" if device_name_style == "caffe2" else "cuda" + + def _copy_cpu_to_gpu_updater(op, input_types, output_types): + if input_types[0] == _GPU_STR or output_types[0] == _CPU_STR: + _updater_raise(op, input_types, output_types) + return ([_CPU_STR], [_GPU_STR]) + + def _copy_gpu_to_cpu_updater(op, input_types, output_types): + if input_types[0] == _CPU_STR or output_types[0] == _GPU_STR: + _updater_raise(op, input_types, output_types) + return ([_GPU_STR], [_CPU_STR]) + + def _other_ops_updater(op, input_types, output_types): + non_none_types = [x for x in input_types + output_types if x is not None] + if len(non_none_types) > 0: + the_type = non_none_types[0] + if not all(x == the_type for x in non_none_types): + _updater_raise(op, input_types, output_types) + else: + the_type = None + return ([the_type for _ in op.input], [the_type for _ in op.output]) + + def _device_updater(op, *args, **kwargs): + return { + "CopyCPUToGPU": _copy_cpu_to_gpu_updater, + "CopyGPUToCPU": _copy_gpu_to_cpu_updater, + }.get(op.type, _other_ops_updater)(op, *args, **kwargs) + + return _generic_status_identifier(predict_net, _device_updater, known_status) + + +# ==== torch/utils_caffe2/vis.py =============================================== + + +def _modify_blob_names(ops, blob_rename_f): + ret = [] + + def _replace_list(blob_list, replaced_list): + del blob_list[:] + blob_list.extend(replaced_list) + + for x in ops: + cur = copy.deepcopy(x) + _replace_list(cur.input, list(map(blob_rename_f, cur.input))) + _replace_list(cur.output, list(map(blob_rename_f, cur.output))) + ret.append(cur) + + return ret + + +def _rename_blob(name, blob_sizes, blob_ranges): + def _list_to_str(bsize): + ret = ", ".join([str(x) for x in bsize]) + ret = "[" + ret + "]" + return ret + + ret = name + if blob_sizes is not None and name in blob_sizes: + ret += "\n" + _list_to_str(blob_sizes[name]) + if blob_ranges is not None and name in blob_ranges: + ret += "\n" + _list_to_str(blob_ranges[name]) + + return ret + + +# graph_name could not contain word 'graph' +def save_graph(net, file_name, graph_name="net", op_only=True, blob_sizes=None, blob_ranges=None): + blob_rename_f = functools.partial(_rename_blob, blob_sizes=blob_sizes, blob_ranges=blob_ranges) + return save_graph_base(net, file_name, graph_name, op_only, blob_rename_f) + + +def save_graph_base(net, file_name, graph_name="net", op_only=True, blob_rename_func=None): + graph = None + ops = net.op + if blob_rename_func is not None: + ops = _modify_blob_names(ops, blob_rename_func) + if not op_only: + graph = net_drawer.GetPydotGraph(ops, graph_name, rankdir="TB") + else: + graph = net_drawer.GetPydotGraphMinimal( + ops, graph_name, rankdir="TB", minimal_dependency=True + ) + + try: + par_dir = os.path.dirname(file_name) + if not os.path.exists(par_dir): + os.makedirs(par_dir) + + format = os.path.splitext(os.path.basename(file_name))[-1] + if format == ".png": + graph.write_png(file_name) + elif format == ".pdf": + graph.write_pdf(file_name) + elif format == ".svg": + 
graph.write_svg(file_name) + else: + print("Incorrect format {}".format(format)) + except Exception as e: + print("Error when writing graph to image {}".format(e)) + + return graph + + +# ==== torch/utils_toffee/aten_to_caffe2.py ==================================== + + +def group_norm_replace_aten_with_caffe2(predict_net: caffe2_pb2.NetDef): + """ + For ONNX exported model, GroupNorm will be represented as ATen op, + this can be a drop in replacement from ATen to GroupNorm + """ + count = 0 + for op in predict_net.op: + if op.type == "ATen": + op_name = get_pb_arg_vals(op, "operator", None) # return byte in py3 + if op_name and op_name.decode() == "group_norm": + op.arg.remove(get_pb_arg(op, "operator")) + + if get_pb_arg_vali(op, "cudnn_enabled", None): + op.arg.remove(get_pb_arg(op, "cudnn_enabled")) + + num_groups = get_pb_arg_vali(op, "num_groups", None) + if num_groups is not None: + op.arg.remove(get_pb_arg(op, "num_groups")) + check_set_pb_arg(op, "group", "i", num_groups) + + op.type = "GroupNorm" + count += 1 + if count > 1: + logger.info("Replaced {} ATen operator to GroupNormOp".format(count)) + + +# ==== torch/utils_toffee/alias.py ============================================= + + +def alias(x, name, is_backward=False): + if not torch.onnx.is_in_onnx_export(): + return x + assert isinstance(x, torch.Tensor) + return torch.ops._caffe2.AliasWithName(x, name, is_backward=is_backward) + + +def fuse_alias_placeholder(predict_net, init_net): + """ Remove AliasWithName placeholder and rename the input/output of it """ + # First we finish all the re-naming + for i, op in enumerate(predict_net.op): + if op.type == "AliasWithName": + assert len(op.input) == 1 + assert len(op.output) == 1 + name = get_pb_arg_vals(op, "name", None).decode() + is_backward = bool(get_pb_arg_vali(op, "is_backward", 0)) + rename_op_input(predict_net, init_net, i, 0, name, from_producer=is_backward) + rename_op_output(predict_net, i, 0, name) + + # Remove AliasWithName, should be very safe since it's a non-op + new_ops = [] + for op in predict_net.op: + if op.type != "AliasWithName": + new_ops.append(op) + else: + # safety check + assert op.input == op.output + assert op.input[0] == op.arg[0].s.decode() + del predict_net.op[:] + predict_net.op.extend(new_ops) + + +# ==== torch/utils_caffe2/graph_transform.py =================================== + + +class IllegalGraphTransformError(ValueError): + """ When a graph transform function call can't be executed. 
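# Outside of an ONNX export pass, alias() above is a plain identity, so the
# AliasWithName placeholders only show up in traced graphs. A quick check
# (torch.onnx.is_in_onnx_export() is False in an ordinary script):
import torch

x = torch.ones(2, 2)
assert alias(x, "data") is x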
""" + + +def _rename_versioned_blob_in_proto( + proto: caffe2_pb2.NetDef, + old_name: str, + new_name: str, + version: int, + ssa: List[Tuple[List[Tuple[str, int]], List[Tuple[str, int]]]], + start_versions: Dict[str, int], + end_versions: Dict[str, int], +): + """ In given proto, rename all blobs with matched version """ + # Operater list + for op, i_th_ssa in zip(proto.op, ssa): + versioned_inputs, versioned_outputs = i_th_ssa + for i in range(len(op.input)): + if versioned_inputs[i] == (old_name, version): + op.input[i] = new_name + for i in range(len(op.output)): + if versioned_outputs[i] == (old_name, version): + op.output[i] = new_name + # external_input + if start_versions.get(old_name, 0) == version: + for i in range(len(proto.external_input)): + if proto.external_input[i] == old_name: + proto.external_input[i] = new_name + # external_output + if end_versions.get(old_name, 0) == version: + for i in range(len(proto.external_output)): + if proto.external_output[i] == old_name: + proto.external_output[i] = new_name + + +def rename_op_input( + predict_net: caffe2_pb2.NetDef, + init_net: caffe2_pb2.NetDef, + op_id: int, + input_id: int, + new_name: str, + from_producer: bool = False, +): + """ + Rename the op_id-th operator in predict_net, change it's input_id-th input's + name to the new_name. It also does automatic re-route and change + external_input and init_net if necessary. + - It requires the input is only consumed by this op. + - This function modifies predict_net and init_net in-place. + - When from_producer is enable, this also updates other operators that consumes + the same input. Be cautious because may trigger unintended behavior. + """ + assert isinstance(predict_net, caffe2_pb2.NetDef) + assert isinstance(init_net, caffe2_pb2.NetDef) + + init_net_ssa, init_net_versions = core.get_ssa(init_net) + predict_net_ssa, predict_net_versions = core.get_ssa( + predict_net, copy.deepcopy(init_net_versions) + ) + + versioned_inputs, versioned_outputs = predict_net_ssa[op_id] + old_name, version = versioned_inputs[input_id] + + if from_producer: + producer_map = get_producer_map(predict_net_ssa) + if not (old_name, version) in producer_map: + raise NotImplementedError( + "Can't find producer, the input {} is probably from" + " init_net, this is not supported yet.".format(old_name) + ) + producer = producer_map[(old_name, version)] + rename_op_output(predict_net, producer[0], producer[1], new_name) + return + + def contain_targets(op_ssa): + return (old_name, version) in op_ssa[0] + + is_consumer = [contain_targets(op_ssa) for op_ssa in predict_net_ssa] + if sum(is_consumer) > 1: + raise IllegalGraphTransformError( + ( + "Input '{}' of operator(#{}) are consumed by other ops, please use" + + " rename_op_output on the producer instead. Offending op: \n{}" + ).format(old_name, op_id, predict_net.op[op_id]) + ) + + # update init_net + _rename_versioned_blob_in_proto( + init_net, old_name, new_name, version, init_net_ssa, {}, init_net_versions + ) + # update predict_net + _rename_versioned_blob_in_proto( + predict_net, + old_name, + new_name, + version, + predict_net_ssa, + init_net_versions, + predict_net_versions, + ) + + +def rename_op_output(predict_net: caffe2_pb2.NetDef, op_id: int, output_id: int, new_name: str): + """ + Rename the op_id-th operator in predict_net, change it's output_id-th input's + name to the new_name. It also does automatic re-route and change + external_output and if necessary. + - It allows multiple consumers of its output. 
+ - This function modifies predict_net in-place, doesn't need init_net. + """ + assert isinstance(predict_net, caffe2_pb2.NetDef) + + ssa, blob_versions = core.get_ssa(predict_net) + + versioned_inputs, versioned_outputs = ssa[op_id] + old_name, version = versioned_outputs[output_id] + + # update predict_net + _rename_versioned_blob_in_proto( + predict_net, old_name, new_name, version, ssa, {}, blob_versions + ) + + +def get_sub_graph_external_input_output( + predict_net: caffe2_pb2.NetDef, sub_graph_op_indices: List[int] +) -> Tuple[List[Tuple[str, int]], List[Tuple[str, int]]]: + """ + Return the list of external input/output of sub-graph, + each element is tuple of the name and corresponding version in predict_net. + + external input/output is defined the same way as caffe2 NetDef. + """ + ssa, versions = core.get_ssa(predict_net) + + all_inputs = [] + all_outputs = [] + for op_id in sub_graph_op_indices: + all_inputs += [inp for inp in ssa[op_id][0] if inp not in all_inputs] + all_outputs += list(ssa[op_id][1]) # ssa output won't repeat + + # for versioned blobs, external inputs are just those blob in all_inputs + # but not in all_outputs + ext_inputs = [inp for inp in all_inputs if inp not in all_outputs] + + # external outputs are essentially outputs of this subgraph that are used + # outside of this sub-graph (including predict_net.external_output) + all_other_inputs = sum( + (ssa[i][0] for i in range(len(ssa)) if i not in sub_graph_op_indices), + [(outp, versions[outp]) for outp in predict_net.external_output], + ) + ext_outputs = [outp for outp in all_outputs if outp in set(all_other_inputs)] + + return ext_inputs, ext_outputs + + +class DiGraph: + """ A DAG representation of caffe2 graph, each vertice is a versioned blob. """ + + def __init__(self): + self.vertices = set() + self.graph = collections.defaultdict(list) + + def add_edge(self, u, v): + self.graph[u].append(v) + self.vertices.add(u) + self.vertices.add(v) + + # grab from https://www.geeksforgeeks.org/find-paths-given-source-destination/ + def get_all_paths(self, s, d): + visited = {k: False for k in self.vertices} + path = [] + all_paths = [] + + def _get_all_paths_util(graph, u, d, visited, path): + visited[u] = True + path.append(u) + if u == d: + all_paths.append(copy.deepcopy(path)) + else: + for i in graph[u]: + if not visited[i]: + _get_all_paths_util(graph, i, d, visited, path) + path.pop() + visited[u] = False + + _get_all_paths_util(self.graph, s, d, visited, path) + return all_paths + + @staticmethod + def from_ssa(ssa): + graph = DiGraph() + for op_id in range(len(ssa)): + for inp in ssa[op_id][0]: + for outp in ssa[op_id][1]: + graph.add_edge(inp, outp) + return graph + + +def _get_dependency_chain(ssa, versioned_target, versioned_source): + """ + Return the index list of relevant operator to produce target blob from source blob, + if there's no dependency, return empty list. + """ + + # finding all paths between nodes can be O(N!), thus we can only search + # in the subgraph using the op starting from the first consumer of source blob + # to the producer of the target blob. 
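# DiGraph.get_all_paths above enumerates every path between two versioned blobs
# with a depth-first search. A toy run on a three-node graph with made-up blob
# names:
g = DiGraph()
g.add_edge(("data", 0), ("conv1", 0))
g.add_edge(("conv1", 0), ("relu1", 0))
g.add_edge(("data", 0), ("relu1", 0))    # a skip connection
print(g.get_all_paths(("data", 0), ("relu1", 0)))
# [[('data', 0), ('conv1', 0), ('relu1', 0)], [('data', 0), ('relu1', 0)]]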
+ consumer_map = get_consumer_map(ssa) + producer_map = get_producer_map(ssa) + start_op = min(x[0] for x in consumer_map[versioned_source]) - 15 + end_op = ( + producer_map[versioned_target][0] + 15 if versioned_target in producer_map else start_op + ) + sub_graph_ssa = ssa[start_op : end_op + 1] + if len(sub_graph_ssa) > 30: + logger.warning( + "Subgraph bebetween {} and {} is large (from op#{} to op#{}), it" + " might take non-trival time to find all paths between them.".format( + versioned_source, versioned_target, start_op, end_op + ) + ) + + dag = DiGraph.from_ssa(sub_graph_ssa) + paths = dag.get_all_paths(versioned_source, versioned_target) # include two ends + ops_in_paths = [[producer_map[blob][0] for blob in path[1:]] for path in paths] + return sorted(set().union(*[set(ops) for ops in ops_in_paths])) + + +def identify_reshape_sub_graph(predict_net: caffe2_pb2.NetDef) -> List[List[int]]: + """ + Idenfity the reshape sub-graph in a protobuf. + The reshape sub-graph is defined as matching the following pattern: + + (input_blob) -> Op_1 -> ... -> Op_N -> (new_shape) -─┐ + └-------------------------------------------> Reshape -> (output_blob) + + Return: + List of sub-graphs, each sub-graph is represented as a list of indices + of the relavent ops, [Op_1, Op_2, ..., Op_N, Reshape] + """ + + ssa, _ = core.get_ssa(predict_net) + + ret = [] + for i, op in enumerate(predict_net.op): + if op.type == "Reshape": + assert len(op.input) == 2 + input_ssa = ssa[i][0] + data_source = input_ssa[0] + shape_source = input_ssa[1] + op_indices = _get_dependency_chain(ssa, shape_source, data_source) + ret.append(op_indices + [i]) + return ret + + +def remove_reshape_for_fc(predict_net, params): + """ + In PyTorch nn.Linear has to take 2D tensor, this often leads to reshape + a 4D tensor to 2D by calling .view(). However this (dynamic) reshaping + doesn't work well with ONNX and Int8 tools, and cause using extra + ops (eg. ExpandDims) that might not be available on mobile. + Luckily Caffe2 supports 4D tensor for FC, so we can remove those reshape + after exporting ONNX model. + """ + from caffe2.python import core + + # find all reshape sub-graph that can be removed, which is now all Reshape + # sub-graph whose output is only consumed by FC. + # TODO: to make it safer, we may need the actually value to better determine + # if a Reshape before FC is removable. + reshape_sub_graphs = identify_reshape_sub_graph(predict_net) + sub_graphs_to_remove = [] + for reshape_sub_graph in reshape_sub_graphs: + reshape_op_id = reshape_sub_graph[-1] + assert predict_net.op[reshape_op_id].type == "Reshape" + ssa, _ = core.get_ssa(predict_net) + reshape_output = ssa[reshape_op_id][1][0] + consumers = [i for i in range(len(ssa)) if reshape_output in ssa[i][0]] + if all(predict_net.op[consumer].type == "FC" for consumer in consumers): + # safety check if the sub-graph is isolated, for this reshape sub-graph, + # it means it has one non-param external input and one external output. + ext_inputs, ext_outputs = get_sub_graph_external_input_output( + predict_net, reshape_sub_graph + ) + non_params_ext_inputs = [inp for inp in ext_inputs if inp[1] != 0] + if len(non_params_ext_inputs) == 1 and len(ext_outputs) == 1: + sub_graphs_to_remove.append(reshape_sub_graph) + + # perform removing subgraph by: + # 1: rename the Reshape's output to its input, then the graph can be + # seen as in-place itentify, meaning whose external input/output are the same. + # 2: simply remove those ops. 
+ remove_op_ids = [] + params_to_remove = [] + for sub_graph in sub_graphs_to_remove: + logger.info( + "Remove Reshape sub-graph:\n{}".format( + "".join(["(#{:>4})\n{}".format(i, predict_net.op[i]) for i in sub_graph]) + ) + ) + reshape_op_id = sub_graph[-1] + new_reshap_output = predict_net.op[reshape_op_id].input[0] + rename_op_output(predict_net, reshape_op_id, 0, new_reshap_output) + ext_inputs, ext_outputs = get_sub_graph_external_input_output(predict_net, sub_graph) + non_params_ext_inputs = [inp for inp in ext_inputs if inp[1] != 0] + params_ext_inputs = [inp for inp in ext_inputs if inp[1] == 0] + assert len(non_params_ext_inputs) == 1 and len(ext_outputs) == 1 + assert ext_outputs[0][0] == non_params_ext_inputs[0][0] + assert ext_outputs[0][1] == non_params_ext_inputs[0][1] + 1 + remove_op_ids.extend(sub_graph) + params_to_remove.extend(params_ext_inputs) + + predict_net = copy.deepcopy(predict_net) + new_ops = [op for i, op in enumerate(predict_net.op) if i not in remove_op_ids] + del predict_net.op[:] + predict_net.op.extend(new_ops) + for versioned_params in params_to_remove: + name = versioned_params[0] + logger.info("Remove params: {} from init_net and predict_net.external_input".format(name)) + del params[name] + predict_net.external_input.remove(name) + + return predict_net, params + + +def fuse_copy_between_cpu_and_gpu(predict_net: caffe2_pb2.NetDef): + """ + In-place fuse extra copy ops between cpu/gpu for the following case: + a -CopyAToB-> b -CopyBToA> c1 -NextOp1-> d1 + -CopyBToA> c2 -NextOp2-> d2 + The fused network will look like: + a -NextOp1-> d1 + -NextOp2-> d2 + """ + + _COPY_OPS = ["CopyCPUToGPU", "CopyGPUToCPU"] + + def _fuse_once(predict_net): + ssa, blob_versions = core.get_ssa(predict_net) + consumer_map = get_consumer_map(ssa) + versioned_external_output = [ + (name, blob_versions[name]) for name in predict_net.external_output + ] + + for op_id, op in enumerate(predict_net.op): + if op.type in _COPY_OPS: + fw_copy_versioned_output = ssa[op_id][1][0] + consumer_ids = [x[0] for x in consumer_map[fw_copy_versioned_output]] + reverse_op_type = _COPY_OPS[1 - _COPY_OPS.index(op.type)] + + is_fusable = ( + len(consumer_ids) > 0 + and fw_copy_versioned_output not in versioned_external_output + and all( + predict_net.op[_op_id].type == reverse_op_type + and ssa[_op_id][1][0] not in versioned_external_output + for _op_id in consumer_ids + ) + ) + + if is_fusable: + for rv_copy_op_id in consumer_ids: + # making each NextOp uses "a" directly and removing Copy ops + rs_copy_versioned_output = ssa[rv_copy_op_id][1][0] + next_op_id, inp_id = consumer_map[rs_copy_versioned_output][0] + predict_net.op[next_op_id].input[inp_id] = op.input[0] + # remove CopyOps + new_ops = [ + op + for i, op in enumerate(predict_net.op) + if i != op_id and i not in consumer_ids + ] + del predict_net.op[:] + predict_net.op.extend(new_ops) + return True + + return False + + # _fuse_once returns False is nothing can be fused + while _fuse_once(predict_net): + pass + + +def remove_dead_end_ops(net_def: caffe2_pb2.NetDef): + """ remove ops if its output is not used or not in external_output """ + ssa, versions = core.get_ssa(net_def) + versioned_external_output = [(name, versions[name]) for name in net_def.external_output] + consumer_map = get_consumer_map(ssa) + removed_op_ids = set() + + def _is_dead_end(versioned_blob): + return not ( + versioned_blob in versioned_external_output + or ( + len(consumer_map[versioned_blob]) > 0 + and all(x[0] not in removed_op_ids for x in 
consumer_map[versioned_blob]) + ) + ) + + for i, ssa_i in reversed(list(enumerate(ssa))): + versioned_outputs = ssa_i[1] + if all(_is_dead_end(outp) for outp in versioned_outputs): + removed_op_ids.add(i) + + # simply removing those deadend ops should have no effect to external_output + new_ops = [op for i, op in enumerate(net_def.op) if i not in removed_op_ids] + del net_def.op[:] + net_def.op.extend(new_ops) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2753739a03659dff5bc5b87f8c8417056d319842 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .batch_norm import FrozenBatchNorm2d, get_norm, NaiveSyncBatchNorm +from .deform_conv import DeformConv, ModulatedDeformConv +from .mask_ops import paste_masks_in_image +from .nms import batched_nms, batched_nms_rotated, nms, nms_rotated +from .roi_align import ROIAlign, roi_align +from .roi_align_rotated import ROIAlignRotated, roi_align_rotated +from .shape_spec import ShapeSpec +from .wrappers import BatchNorm2d, Conv2d, ConvTranspose2d, cat, interpolate, Linear +from .blocks import CNNBlockBase + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/batch_norm.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/batch_norm.py new file mode 100644 index 0000000000000000000000000000000000000000..1339c6eaedfbc65c9604043234b738382d07fd40 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/batch_norm.py @@ -0,0 +1,242 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import torch +import torch.distributed as dist +from torch import nn +from torch.autograd.function import Function +from torch.nn import functional as F + +from detectron2.utils import comm + +from .wrappers import BatchNorm2d + +TORCH_VERSION = tuple(int(x) for x in torch.__version__.split(".")[:2]) + + +class FrozenBatchNorm2d(nn.Module): + """ + BatchNorm2d where the batch statistics and the affine parameters are fixed. + + It contains non-trainable buffers called + "weight" and "bias", "running_mean", "running_var", + initialized to perform identity transformation. + + The pre-trained backbone models from Caffe2 only contain "weight" and "bias", + which are computed from the original four parameters of BN. + The affine transform `x * weight + bias` will perform the equivalent + computation of `(x - running_mean) / sqrt(running_var) * weight + bias`. + When loading a backbone model from Caffe2, "running_mean" and "running_var" + will be left unchanged as identity transformation. + + Other pre-trained backbone models may contain all 4 parameters. + + The forward is implemented by `F.batch_norm(..., training=False)`. 
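+
+ Note: "running_var" is deliberately initialized to ``1 - eps`` (and checkpoints
+ saved before version 3 have ``eps`` subtracted when loaded), so that the
+ ``running_var + eps`` term used in ``forward`` reproduces the intended variance.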
+ """ + + _version = 3 + + def __init__(self, num_features, eps=1e-5): + super().__init__() + self.num_features = num_features + self.eps = eps + self.register_buffer("weight", torch.ones(num_features)) + self.register_buffer("bias", torch.zeros(num_features)) + self.register_buffer("running_mean", torch.zeros(num_features)) + self.register_buffer("running_var", torch.ones(num_features) - eps) + + def forward(self, x): + if x.requires_grad: + # When gradients are needed, F.batch_norm will use extra memory + # because its backward op computes gradients for weight/bias as well. + scale = self.weight * (self.running_var + self.eps).rsqrt() + bias = self.bias - self.running_mean * scale + scale = scale.reshape(1, -1, 1, 1) + bias = bias.reshape(1, -1, 1, 1) + return x * scale + bias + else: + # When gradients are not needed, F.batch_norm is a single fused op + # and provide more optimization opportunities. + return F.batch_norm( + x, + self.running_mean, + self.running_var, + self.weight, + self.bias, + training=False, + eps=self.eps, + ) + + def _load_from_state_dict( + self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs + ): + version = local_metadata.get("version", None) + + if version is None or version < 2: + # No running_mean/var in early versions + # This will silent the warnings + if prefix + "running_mean" not in state_dict: + state_dict[prefix + "running_mean"] = torch.zeros_like(self.running_mean) + if prefix + "running_var" not in state_dict: + state_dict[prefix + "running_var"] = torch.ones_like(self.running_var) + + if version is not None and version < 3: + logger = logging.getLogger(__name__) + logger.info("FrozenBatchNorm {} is upgraded to version 3.".format(prefix.rstrip("."))) + # In version < 3, running_var are used without +eps. + state_dict[prefix + "running_var"] -= self.eps + + super()._load_from_state_dict( + state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs + ) + + def __repr__(self): + return "FrozenBatchNorm2d(num_features={}, eps={})".format(self.num_features, self.eps) + + @classmethod + def convert_frozen_batchnorm(cls, module): + """ + Convert BatchNorm/SyncBatchNorm in module into FrozenBatchNorm. + + Args: + module (torch.nn.Module): + + Returns: + If module is BatchNorm/SyncBatchNorm, returns a new module. + Otherwise, in-place convert module and return it. + + Similar to convert_sync_batchnorm in + https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/batchnorm.py + """ + bn_module = nn.modules.batchnorm + bn_module = (bn_module.BatchNorm2d, bn_module.SyncBatchNorm) + res = module + if isinstance(module, bn_module): + res = cls(module.num_features) + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + else: + for name, child in module.named_children(): + new_child = cls.convert_frozen_batchnorm(child) + if new_child is not child: + res.add_module(name, new_child) + return res + + +def get_norm(norm, out_channels): + """ + Args: + norm (str or callable): either one of BN, SyncBN, FrozenBN, GN; + or a callable that takes a channel number and returns + the normalization layer as a nn.Module. 
+ + Returns: + nn.Module or None: the normalization layer + """ + if isinstance(norm, str): + if len(norm) == 0: + return None + norm = { + "BN": BatchNorm2d, + # Fixed in https://github.com/pytorch/pytorch/pull/36382 + "SyncBN": NaiveSyncBatchNorm if TORCH_VERSION <= (1, 5) else nn.SyncBatchNorm, + "FrozenBN": FrozenBatchNorm2d, + "GN": lambda channels: nn.GroupNorm(32, channels), + # for debugging: + "nnSyncBN": nn.SyncBatchNorm, + "naiveSyncBN": NaiveSyncBatchNorm, + }[norm] + return norm(out_channels) + + +class AllReduce(Function): + @staticmethod + def forward(ctx, input): + input_list = [torch.zeros_like(input) for k in range(dist.get_world_size())] + # Use allgather instead of allreduce since I don't trust in-place operations .. + dist.all_gather(input_list, input, async_op=False) + inputs = torch.stack(input_list, dim=0) + return torch.sum(inputs, dim=0) + + @staticmethod + def backward(ctx, grad_output): + dist.all_reduce(grad_output, async_op=False) + return grad_output + + +class NaiveSyncBatchNorm(BatchNorm2d): + """ + In PyTorch<=1.5, `nn.SyncBatchNorm` has incorrect gradient + when the batch size on each worker is different. + (e.g., when scale augmentation is used, or when it is applied to mask head). + + This is a slower but correct alternative to `nn.SyncBatchNorm`. + + Note: + There isn't a single definition of Sync BatchNorm. + + When ``stats_mode==""``, this module computes overall statistics by using + statistics of each worker with equal weight. The result is true statistics + of all samples (as if they are all on one worker) only when all workers + have the same (N, H, W). This mode does not support inputs with zero batch size. + + When ``stats_mode=="N"``, this module computes overall statistics by weighting + the statistics of each worker by their ``N``. The result is true statistics + of all samples (as if they are all on one worker) only when all workers + have the same (H, W). It is slower than ``stats_mode==""``. + + Even though the result of this module may not be the true statistics of all samples, + it may still be reasonable because it might be preferrable to assign equal weights + to all workers, regardless of their (H, W) dimension, instead of putting larger weight + on larger images. From preliminary experiments, little difference is found between such + a simplified implementation and an accurate computation of overall mean & variance. + """ + + def __init__(self, *args, stats_mode="", **kwargs): + super().__init__(*args, **kwargs) + assert stats_mode in ["", "N"] + self._stats_mode = stats_mode + + def forward(self, input): + if comm.get_world_size() == 1 or not self.training: + return super().forward(input) + + B, C = input.shape[0], input.shape[1] + + mean = torch.mean(input, dim=[0, 2, 3]) + meansqr = torch.mean(input * input, dim=[0, 2, 3]) + + if self._stats_mode == "": + assert B > 0, 'SyncBatchNorm(stats_mode="") does not support zero batch size.' 
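+ # Concatenate the per-worker E[x] and E[x^2] vectors, sum them across workers
+ # via AllReduce (implemented above with all_gather + sum), and divide by the
+ # world size so that every worker ends up with identical global statistics;
+ # the variance is recovered below as E[x^2] - E[x]^2.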
+ vec = torch.cat([mean, meansqr], dim=0) + vec = AllReduce.apply(vec) * (1.0 / dist.get_world_size()) + mean, meansqr = torch.split(vec, C) + momentum = self.momentum + else: + if B == 0: + vec = torch.zeros([2 * C + 1], device=mean.device, dtype=mean.dtype) + vec = vec + input.sum() # make sure there is gradient w.r.t input + else: + vec = torch.cat( + [mean, meansqr, torch.ones([1], device=mean.device, dtype=mean.dtype)], dim=0 + ) + vec = AllReduce.apply(vec * B) + + total_batch = vec[-1].detach() + momentum = total_batch.clamp(max=1) * self.momentum # no update if total_batch is 0 + total_batch = torch.max(total_batch, torch.ones_like(total_batch)) # avoid div-by-zero + mean, meansqr, _ = torch.split(vec / total_batch, C) + + var = meansqr - mean * mean + invstd = torch.rsqrt(var + self.eps) + scale = self.weight * invstd + bias = self.bias - mean * scale + scale = scale.reshape(1, -1, 1, 1) + bias = bias.reshape(1, -1, 1, 1) + + self.running_mean += momentum * (mean.detach() - self.running_mean) + self.running_var += momentum * (var.detach() - self.running_var) + return input * scale + bias diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/blocks.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..1d06fec22e472febbc960c49f747acddd2ab7208 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/blocks.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from torch import nn + +from .batch_norm import FrozenBatchNorm2d + + +class CNNBlockBase(nn.Module): + """ + A CNN block is assumed to have input channels, output channels and a stride. + The input and output of `forward()` method must be NCHW tensors. + The method can perform arbitrary computation but must match the given + channels and stride specification. + + Attribute: + in_channels (int): + out_channels (int): + stride (int): + """ + + def __init__(self, in_channels, out_channels, stride): + """ + The `__init__` method of any subclass should also contain these arguments. + + Args: + in_channels (int): + out_channels (int): + stride (int): + """ + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.stride = stride + + def freeze(self): + """ + Make this block not trainable. + This method sets all parameters to `requires_grad=False`, + and convert all BatchNorm layers to FrozenBatchNorm + + Returns: + the block itself + """ + for p in self.parameters(): + p.requires_grad = False + FrozenBatchNorm2d.convert_frozen_batchnorm(self) + return self diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..778ed3da0bae89820831bcd8a72ff7b9cad8d4dd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/README.md @@ -0,0 +1,7 @@ + + +To add a new Op: + +1. Create a new directory +2. Implement new ops there +3. Delcare its Python interface in `vision.cpp`. 
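As a minimal usage sketch of how the layers above compose (assuming the vendored package is importable as `detectron2`; `BasicBlock` and the channel sizes are illustrative only and not part of this codebase):

import torch
from detectron2.layers import CNNBlockBase, Conv2d, FrozenBatchNorm2d, get_norm

class BasicBlock(CNNBlockBase):
    # A toy block: 3x3 conv followed by a norm layer selected via get_norm().
    def __init__(self, in_channels, out_channels, norm="BN"):
        super().__init__(in_channels, out_channels, stride=1)
        self.conv = Conv2d(in_channels, out_channels, kernel_size=3, padding=1, bias=False)
        self.norm = get_norm(norm, out_channels)

    def forward(self, x):
        return torch.relu_(self.norm(self.conv(x)))

block = BasicBlock(3, 8)
block.freeze()  # requires_grad=False everywhere; BatchNorm2d becomes FrozenBatchNorm2d
assert isinstance(block.norm, FrozenBatchNorm2d)
out = block(torch.randn(2, 3, 32, 32))  # NCHW in, NCHW out

A typical use of freeze() is keeping early backbone stages fixed during fine-tuning; the converted FrozenBatchNorm2d then applies the stored statistics as a plain affine transform.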
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign.h b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign.h new file mode 100644 index 0000000000000000000000000000000000000000..2d95eac6e29d5e5624afbc6c545776d78ebc709c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign.h @@ -0,0 +1,130 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#pragma once +#include + +namespace detectron2 { + +at::Tensor ROIAlign_forward_cpu( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + bool aligned); + +at::Tensor ROIAlign_backward_cpu( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio, + bool aligned); + +#ifdef WITH_CUDA +at::Tensor ROIAlign_forward_cuda( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + bool aligned); + +at::Tensor ROIAlign_backward_cuda( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio, + bool aligned); +#endif + +// Interface for Python +inline at::Tensor ROIAlign_forward( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + bool aligned) { + if (input.is_cuda()) { +#ifdef WITH_CUDA + return ROIAlign_forward_cuda( + input, + rois, + spatial_scale, + pooled_height, + pooled_width, + sampling_ratio, + aligned); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + return ROIAlign_forward_cpu( + input, + rois, + spatial_scale, + pooled_height, + pooled_width, + sampling_ratio, + aligned); +} + +inline at::Tensor ROIAlign_backward( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio, + bool aligned) { + if (grad.is_cuda()) { +#ifdef WITH_CUDA + return ROIAlign_backward_cuda( + grad, + rois, + spatial_scale, + pooled_height, + pooled_width, + batch_size, + channels, + height, + width, + sampling_ratio, + aligned); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + return ROIAlign_backward_cpu( + grad, + rois, + spatial_scale, + pooled_height, + pooled_width, + batch_size, + channels, + height, + width, + sampling_ratio, + aligned); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign_cpu.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign_cpu.cpp new file mode 100644 index 0000000000000000000000000000000000000000..52fc83f8140b29de7b2ad3cb490b8cb672959e16 --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign_cpu.cpp @@ -0,0 +1,508 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include +#include "ROIAlign.h" + +namespace { + +// implementation taken from Caffe2 +template +struct PreCalc { + int pos1; + int pos2; + int pos3; + int pos4; + T w1; + T w2; + T w3; + T w4; +}; + +template +void pre_calc_for_bilinear_interpolate( + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int iy_upper, + const int ix_upper, + T roi_start_h, + T roi_start_w, + T bin_size_h, + T bin_size_w, + int roi_bin_grid_h, + int roi_bin_grid_w, + std::vector>& pre_calc) { + int pre_calc_index = 0; + for (int ph = 0; ph < pooled_height; ph++) { + for (int pw = 0; pw < pooled_width; pw++) { + for (int iy = 0; iy < iy_upper; iy++) { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < ix_upper; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + T x = xx; + T y = yy; + // deal with: inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + PreCalc pc; + pc.pos1 = 0; + pc.pos2 = 0; + pc.pos3 = 0; + pc.pos4 = 0; + pc.w1 = 0; + pc.w2 = 0; + pc.w3 = 0; + pc.w4 = 0; + pre_calc[pre_calc_index] = pc; + pre_calc_index += 1; + continue; + } + + if (y <= 0) { + y = 0; + } + if (x <= 0) { + x = 0; + } + + int y_low = (int)y; + int x_low = (int)x; + int y_high; + int x_high; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. - lx; + T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + // save weights and indices + PreCalc pc; + pc.pos1 = y_low * width + x_low; + pc.pos2 = y_low * width + x_high; + pc.pos3 = y_high * width + x_low; + pc.pos4 = y_high * width + x_high; + pc.w1 = w1; + pc.w2 = w2; + pc.w3 = w3; + pc.w4 = w4; + pre_calc[pre_calc_index] = pc; + + pre_calc_index += 1; + } + } + } + } +} + +template +void ROIAlignForward( + const int nthreads, + const T* input, + const T& spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + const T* rois, + T* output, + bool aligned) { + int n_rois = nthreads / channels / pooled_width / pooled_height; + // (n, c, ph, pw) is an element in the pooled output + // can be parallelized using omp + // #pragma omp parallel for num_threads(32) + for (int n = 0; n < n_rois; n++) { + int index_n = n * channels * pooled_width * pooled_height; + + const T* offset_rois = rois + n * 5; + int roi_batch_ind = offset_rois[0]; + + // Do not use rounding; this implementation detail is critical + T offset = aligned ? 
(T)0.5 : (T)0.0; + T roi_start_w = offset_rois[1] * spatial_scale - offset; + T roi_start_h = offset_rois[2] * spatial_scale - offset; + T roi_end_w = offset_rois[3] * spatial_scale - offset; + T roi_end_h = offset_rois[4] * spatial_scale - offset; + + T roi_width = roi_end_w - roi_start_w; + T roi_height = roi_end_h - roi_start_h; + if (aligned) { + AT_ASSERTM( + roi_width >= 0 && roi_height >= 0, + "ROIs in ROIAlign cannot have non-negative size!"); + } else { // for backward-compatibility only + roi_width = std::max(roi_width, (T)1.); + roi_height = std::max(roi_height, (T)1.); + } + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // We do average (integral) pooling inside a bin + // When the grid is empty, output zeros == 0/1, instead of NaN. + const T count = std::max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. = 4 + + // we want to precalculate indices and weights shared by all channels, + // this is the key point of optimization + std::vector> pre_calc( + roi_bin_grid_h * roi_bin_grid_w * pooled_width * pooled_height); + pre_calc_for_bilinear_interpolate( + height, + width, + pooled_height, + pooled_width, + roi_bin_grid_h, + roi_bin_grid_w, + roi_start_h, + roi_start_w, + bin_size_h, + bin_size_w, + roi_bin_grid_h, + roi_bin_grid_w, + pre_calc); + + for (int c = 0; c < channels; c++) { + int index_n_c = index_n + c * pooled_width * pooled_height; + const T* offset_input = + input + (roi_batch_ind * channels + c) * height * width; + int pre_calc_index = 0; + + for (int ph = 0; ph < pooled_height; ph++) { + for (int pw = 0; pw < pooled_width; pw++) { + int index = index_n_c + ph * pooled_width + pw; + + T output_val = 0.; + for (int iy = 0; iy < roi_bin_grid_h; iy++) { + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + PreCalc pc = pre_calc[pre_calc_index]; + output_val += pc.w1 * offset_input[pc.pos1] + + pc.w2 * offset_input[pc.pos2] + + pc.w3 * offset_input[pc.pos3] + pc.w4 * offset_input[pc.pos4]; + + pre_calc_index += 1; + } + } + output_val /= count; + + output[index] = output_val; + } // for pw + } // for ph + } // for c + } // for n +} + +template +void bilinear_interpolate_gradient( + const int height, + const int width, + T y, + T x, + T& w1, + T& w2, + T& w3, + T& w4, + int& x_low, + int& x_high, + int& y_low, + int& y_high, + const int index /* index for debug only*/) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + w1 = w2 = w3 = w4 = 0.; + x_low = x_high = y_low = y_high = -1; + return; + } + + if (y <= 0) + y = 0; + if (x <= 0) + x = 0; + + y_low = (int)y; + x_low = (int)x; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. 
- lx; + + // reference in forward + // T v1 = input[y_low * width + x_low]; + // T v2 = input[y_low * width + x_high]; + // T v3 = input[y_high * width + x_low]; + // T v4 = input[y_high * width + x_high]; + // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + return; +} + +template +inline void add(T* address, const T& val) { + *address += val; +} + +template +void ROIAlignBackward( + const int nthreads, + // may not be contiguous, and should be indexed using n_stride, etc + const T* grad_output, + const T& spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + T* grad_input, + const T* rois, + const int n_stride, + const int c_stride, + const int h_stride, + const int w_stride, + bool aligned) { + for (int index = 0; index < nthreads; index++) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* offset_rois = rois + n * 5; + int roi_batch_ind = offset_rois[0]; + + // Do not use rounding; this implementation detail is critical + T offset = aligned ? (T)0.5 : (T)0.0; + T roi_start_w = offset_rois[1] * spatial_scale - offset; + T roi_start_h = offset_rois[2] * spatial_scale - offset; + T roi_end_w = offset_rois[3] * spatial_scale - offset; + T roi_end_h = offset_rois[4] * spatial_scale - offset; + + T roi_width = roi_end_w - roi_start_w; + T roi_height = roi_end_h - roi_start_h; + if (aligned) { + AT_ASSERTM( + roi_width >= 0 && roi_height >= 0, + "ROIs in ROIAlign do not have non-negative size!"); + } else { // for backward-compatibility only + roi_width = std::max(roi_width, (T)1.); + roi_height = std::max(roi_height, (T)1.); + } + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + T* offset_grad_input = + grad_input + ((roi_batch_ind * channels + c) * height * width); + + int output_offset = n * n_stride + c * c_stride; + const T* offset_grad_output = grad_output + output_offset; + const T grad_output_this_bin = + offset_grad_output[ph * h_stride + pw * w_stride]; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // We do average (integral) pooling inside a bin + const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. 
= 4 + + for (int iy = 0; iy < roi_bin_grid_h; iy++) { + const T y = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T x = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + T w1, w2, w3, w4; + int x_low, x_high, y_low, y_high; + + bilinear_interpolate_gradient( + height, + width, + y, + x, + w1, + w2, + w3, + w4, + x_low, + x_high, + y_low, + y_high, + index); + + T g1 = grad_output_this_bin * w1 / count; + T g2 = grad_output_this_bin * w2 / count; + T g3 = grad_output_this_bin * w3 / count; + T g4 = grad_output_this_bin * w4 / count; + + if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) { + // atomic add is not needed for now since it is single threaded + add(offset_grad_input + y_low * width + x_low, static_cast(g1)); + add(offset_grad_input + y_low * width + x_high, static_cast(g2)); + add(offset_grad_input + y_high * width + x_low, static_cast(g3)); + add(offset_grad_input + y_high * width + x_high, static_cast(g4)); + } // if + } // ix + } // iy + } // for +} // ROIAlignBackward + +} // namespace + +namespace detectron2 { + +at::Tensor ROIAlign_forward_cpu( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + bool aligned) { + AT_ASSERTM(input.device().is_cpu(), "input must be a CPU tensor"); + AT_ASSERTM(rois.device().is_cpu(), "rois must be a CPU tensor"); + + at::TensorArg input_t{input, "input", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlign_forward_cpu"; + at::checkAllSameType(c, {input_t, rois_t}); + + auto num_rois = rois.size(0); + auto channels = input.size(1); + auto height = input.size(2); + auto width = input.size(3); + + at::Tensor output = at::zeros( + {num_rois, channels, pooled_height, pooled_width}, input.options()); + + auto output_size = num_rois * pooled_height * pooled_width * channels; + + if (output.numel() == 0) + return output; + + auto input_ = input.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + input.scalar_type(), "ROIAlign_forward", [&] { + ROIAlignForward( + output_size, + input_.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + rois_.data_ptr(), + output.data_ptr(), + aligned); + }); + return output; +} + +at::Tensor ROIAlign_backward_cpu( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio, + bool aligned) { + AT_ASSERTM(grad.device().is_cpu(), "grad must be a CPU tensor"); + AT_ASSERTM(rois.device().is_cpu(), "rois must be a CPU tensor"); + + at::TensorArg grad_t{grad, "grad", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlign_backward_cpu"; + at::checkAllSameType(c, {grad_t, rois_t}); + + at::Tensor grad_input = + at::zeros({batch_size, channels, height, width}, grad.options()); + + // handle possibly empty gradients + if (grad.numel() == 0) { + return grad_input; + } + + // get stride values to ensure indexing into gradients is correct. 
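+ // grad can be a non-contiguous view (see ROIAlignBackward above), so it is
+ // indexed with its real strides below instead of being copied to a contiguous
+ // buffer first.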
+ int n_stride = grad.stride(0); + int c_stride = grad.stride(1); + int h_stride = grad.stride(2); + int w_stride = grad.stride(3); + + auto rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + grad.scalar_type(), "ROIAlign_forward", [&] { + ROIAlignBackward( + grad.numel(), + grad.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + grad_input.data_ptr(), + rois_.data_ptr(), + n_stride, + c_stride, + h_stride, + w_stride, + aligned); + }); + return grad_input; +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign_cuda.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign_cuda.cu new file mode 100644 index 0000000000000000000000000000000000000000..2e05953b03089203d29bc304726afbca7ee5d464 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlign/ROIAlign_cuda.cu @@ -0,0 +1,430 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include +#include +#include +#include + +// TODO make it in a common file +#define CUDA_1D_KERNEL_LOOP(i, n) \ + for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n; \ + i += blockDim.x * gridDim.x) + +template +__device__ T bilinear_interpolate( + const T* bottom_data, + const int height, + const int width, + T y, + T x, + const int index /* index for debug only*/) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + return 0; + } + + if (y <= 0) + y = 0; + if (x <= 0) + x = 0; + + int y_low = (int)y; + int x_low = (int)x; + int y_high; + int x_high; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. - lx; + // do bilinear interpolation + T v1 = bottom_data[y_low * width + x_low]; + T v2 = bottom_data[y_low * width + x_high]; + T v3 = bottom_data[y_high * width + x_low]; + T v4 = bottom_data[y_high * width + x_high]; + T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + return val; +} + +template +__global__ void RoIAlignForward( + const int nthreads, + const T* bottom_data, + const T spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + const T* bottom_rois, + T* top_data, + bool aligned) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* offset_bottom_rois = bottom_rois + n * 5; + int roi_batch_ind = offset_bottom_rois[0]; + + // Do not use rounding; this implementation detail is critical + T offset = aligned ? 
(T)0.5 : (T)0.0; + T roi_start_w = offset_bottom_rois[1] * spatial_scale - offset; + T roi_start_h = offset_bottom_rois[2] * spatial_scale - offset; + T roi_end_w = offset_bottom_rois[3] * spatial_scale - offset; + T roi_end_h = offset_bottom_rois[4] * spatial_scale - offset; + + T roi_width = roi_end_w - roi_start_w; + T roi_height = roi_end_h - roi_start_h; + if (!aligned) { // for backward-compatibility only + roi_width = max(roi_width, (T)1.); + roi_height = max(roi_height, (T)1.); + } + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + const T* offset_bottom_data = + bottom_data + (roi_batch_ind * channels + c) * height * width; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // We do average (integral) pooling inside a bin + // When the grid is empty, output zeros == 0/1, instead of NaN. + const T count = max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. = 4 + + T output_val = 0.; + for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1 + { + const T y = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T x = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + T val = bilinear_interpolate( + offset_bottom_data, height, width, y, x, index); + output_val += val; + } + } + output_val /= count; + + top_data[index] = output_val; + } +} + +template +__device__ void bilinear_interpolate_gradient( + const int height, + const int width, + T y, + T x, + T& w1, + T& w2, + T& w3, + T& w4, + int& x_low, + int& x_high, + int& y_low, + int& y_high, + const int index /* index for debug only*/) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + w1 = w2 = w3 = w4 = 0.; + x_low = x_high = y_low = y_high = -1; + return; + } + + if (y <= 0) + y = 0; + if (x <= 0) + x = 0; + + y_low = (int)y; + x_low = (int)x; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. 
- lx; + + // reference in forward + // T v1 = bottom_data[y_low * width + x_low]; + // T v2 = bottom_data[y_low * width + x_high]; + // T v3 = bottom_data[y_high * width + x_low]; + // T v4 = bottom_data[y_high * width + x_high]; + // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + return; +} + +template +__global__ void RoIAlignBackwardFeature( + const int nthreads, + const T* top_diff, + const int num_rois, + const T spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + T* bottom_diff, + const T* bottom_rois, + bool aligned) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* offset_bottom_rois = bottom_rois + n * 5; + int roi_batch_ind = offset_bottom_rois[0]; + + // Do not use rounding; this implementation detail is critical + T offset = aligned ? (T)0.5 : (T)0.0; + T roi_start_w = offset_bottom_rois[1] * spatial_scale - offset; + T roi_start_h = offset_bottom_rois[2] * spatial_scale - offset; + T roi_end_w = offset_bottom_rois[3] * spatial_scale - offset; + T roi_end_h = offset_bottom_rois[4] * spatial_scale - offset; + + T roi_width = roi_end_w - roi_start_w; + T roi_height = roi_end_h - roi_start_h; + if (!aligned) { // for backward-compatibility only + roi_width = max(roi_width, (T)1.); + roi_height = max(roi_height, (T)1.); + } + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + T* offset_bottom_diff = + bottom_diff + (roi_batch_ind * channels + c) * height * width; + + int top_offset = (n * channels + c) * pooled_height * pooled_width; + const T* offset_top_diff = top_diff + top_offset; + const T top_diff_this_bin = offset_top_diff[ph * pooled_width + pw]; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // We do average (integral) pooling inside a bin + const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. 
= 4 + + for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1 + { + const T y = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T x = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + T w1, w2, w3, w4; + int x_low, x_high, y_low, y_high; + + bilinear_interpolate_gradient( + height, + width, + y, + x, + w1, + w2, + w3, + w4, + x_low, + x_high, + y_low, + y_high, + index); + + T g1 = top_diff_this_bin * w1 / count; + T g2 = top_diff_this_bin * w2 / count; + T g3 = top_diff_this_bin * w3 / count; + T g4 = top_diff_this_bin * w4 / count; + + if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) { + atomicAdd( + offset_bottom_diff + y_low * width + x_low, static_cast(g1)); + atomicAdd( + offset_bottom_diff + y_low * width + x_high, static_cast(g2)); + atomicAdd( + offset_bottom_diff + y_high * width + x_low, static_cast(g3)); + atomicAdd( + offset_bottom_diff + y_high * width + x_high, static_cast(g4)); + } // if + } // ix + } // iy + } // CUDA_1D_KERNEL_LOOP +} // RoIAlignBackward + +namespace detectron2 { + +at::Tensor ROIAlign_forward_cuda( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + bool aligned) { + AT_ASSERTM(input.device().is_cuda(), "input must be a CUDA tensor"); + AT_ASSERTM(rois.device().is_cuda(), "rois must be a CUDA tensor"); + at::TensorArg input_t{input, "input", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlign_forward_cuda"; + at::checkAllSameGPU(c, {input_t, rois_t}); + at::checkAllSameType(c, {input_t, rois_t}); + at::cuda::CUDAGuard device_guard(input.device()); + + auto num_rois = rois.size(0); + auto channels = input.size(1); + auto height = input.size(2); + auto width = input.size(3); + + auto output = at::empty( + {num_rois, channels, pooled_height, pooled_width}, input.options()); + auto output_size = num_rois * pooled_height * pooled_width * channels; + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + dim3 grid(std::min( + at::cuda::ATenCeilDiv( + static_cast(output_size), static_cast(512)), + static_cast(4096))); + dim3 block(512); + + if (output.numel() == 0) { + AT_CUDA_CHECK(cudaGetLastError()); + return output; + } + + auto input_ = input.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES(input.scalar_type(), "ROIAlign_forward", [&] { + RoIAlignForward<<>>( + output_size, + input_.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + rois_.data_ptr(), + output.data_ptr(), + aligned); + }); + cudaDeviceSynchronize(); + AT_CUDA_CHECK(cudaGetLastError()); + return output; +} + +// TODO remove the dependency on input and use instead its sizes -> save memory +at::Tensor ROIAlign_backward_cuda( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio, + bool aligned) { + AT_ASSERTM(grad.device().is_cuda(), "grad must be a CUDA tensor"); + AT_ASSERTM(rois.device().is_cuda(), "rois must be a CUDA tensor"); + + at::TensorArg grad_t{grad, "grad", 1}, rois_t{rois, "rois", 2}; + at::CheckedFrom c = "ROIAlign_backward_cuda"; + at::checkAllSameGPU(c, {grad_t, rois_t}); + 
at::checkAllSameType(c, {grad_t, rois_t}); + at::cuda::CUDAGuard device_guard(grad.device()); + + auto num_rois = rois.size(0); + auto grad_input = + at::zeros({batch_size, channels, height, width}, grad.options()); + + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + dim3 grid(std::min( + at::cuda::ATenCeilDiv( + static_cast(grad.numel()), static_cast(512)), + static_cast(4096))); + dim3 block(512); + + // handle possibly empty gradients + if (grad.numel() == 0) { + AT_CUDA_CHECK(cudaGetLastError()); + return grad_input; + } + + auto grad_ = grad.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES(grad.scalar_type(), "ROIAlign_backward", [&] { + RoIAlignBackwardFeature<<>>( + grad.numel(), + grad_.data_ptr(), + num_rois, + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + grad_input.data_ptr(), + rois_.data_ptr(), + aligned); + }); + AT_CUDA_CHECK(cudaGetLastError()); + return grad_input; +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated.h b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated.h new file mode 100644 index 0000000000000000000000000000000000000000..a99c8ebddaa4936e26437b42d62e2b8355c655aa --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated.h @@ -0,0 +1,115 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#pragma once +#include + +namespace detectron2 { + +at::Tensor ROIAlignRotated_forward_cpu( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio); + +at::Tensor ROIAlignRotated_backward_cpu( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio); + +#ifdef WITH_CUDA +at::Tensor ROIAlignRotated_forward_cuda( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio); + +at::Tensor ROIAlignRotated_backward_cuda( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio); +#endif + +// Interface for Python +inline at::Tensor ROIAlignRotated_forward( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio) { + if (input.is_cuda()) { +#ifdef WITH_CUDA + return ROIAlignRotated_forward_cuda( + input, + rois, + spatial_scale, + pooled_height, + pooled_width, + sampling_ratio); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + return ROIAlignRotated_forward_cpu( + input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio); +} + +inline at::Tensor ROIAlignRotated_backward( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int 
sampling_ratio) { + if (grad.is_cuda()) { +#ifdef WITH_CUDA + return ROIAlignRotated_backward_cuda( + grad, + rois, + spatial_scale, + pooled_height, + pooled_width, + batch_size, + channels, + height, + width, + sampling_ratio); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + return ROIAlignRotated_backward_cpu( + grad, + rois, + spatial_scale, + pooled_height, + pooled_width, + batch_size, + channels, + height, + width, + sampling_ratio); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cpu.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cpu.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7e5e1ffdccd0e2ced15fa34b4906388d371bffe2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cpu.cpp @@ -0,0 +1,522 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include +#include "ROIAlignRotated.h" + +// Note: this implementation originates from the Caffe2 ROIAlignRotated Op +// and PyTorch ROIAlign (non-rotated) Op implementations. +// The key difference between this implementation and those ones is +// we don't do "legacy offset" in this version, as there aren't many previous +// works, if any, using the "legacy" ROIAlignRotated Op. +// This would make the interface a bit cleaner. + +namespace detectron2 { + +namespace { +template +struct PreCalc { + int pos1; + int pos2; + int pos3; + int pos4; + T w1; + T w2; + T w3; + T w4; +}; + +template +void pre_calc_for_bilinear_interpolate( + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int iy_upper, + const int ix_upper, + T roi_start_h, + T roi_start_w, + T bin_size_h, + T bin_size_w, + int roi_bin_grid_h, + int roi_bin_grid_w, + T roi_center_h, + T roi_center_w, + T cos_theta, + T sin_theta, + std::vector>& pre_calc) { + int pre_calc_index = 0; + for (int ph = 0; ph < pooled_height; ph++) { + for (int pw = 0; pw < pooled_width; pw++) { + for (int iy = 0; iy < iy_upper; iy++) { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < ix_upper; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + // In image space, (y, x) is the order for Right Handed System, + // and this is essentially multiplying the point by a rotation matrix + // to rotate it counterclockwise through angle theta. 
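+ // Concretely: [y, x] = R(theta) * [yy, xx] + [roi_center_h, roi_center_w],
+ // with R(theta) = [[cos, -sin], [sin, cos]] acting on (y, x) image coordinates.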
+ T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + // deal with: inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + PreCalc pc; + pc.pos1 = 0; + pc.pos2 = 0; + pc.pos3 = 0; + pc.pos4 = 0; + pc.w1 = 0; + pc.w2 = 0; + pc.w3 = 0; + pc.w4 = 0; + pre_calc[pre_calc_index] = pc; + pre_calc_index += 1; + continue; + } + + if (y < 0) { + y = 0; + } + if (x < 0) { + x = 0; + } + + int y_low = (int)y; + int x_low = (int)x; + int y_high; + int x_high; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. - lx; + T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + // save weights and indices + PreCalc pc; + pc.pos1 = y_low * width + x_low; + pc.pos2 = y_low * width + x_high; + pc.pos3 = y_high * width + x_low; + pc.pos4 = y_high * width + x_high; + pc.w1 = w1; + pc.w2 = w2; + pc.w3 = w3; + pc.w4 = w4; + pre_calc[pre_calc_index] = pc; + + pre_calc_index += 1; + } + } + } + } +} + +template +void bilinear_interpolate_gradient( + const int height, + const int width, + T y, + T x, + T& w1, + T& w2, + T& w3, + T& w4, + int& x_low, + int& x_high, + int& y_low, + int& y_high) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + w1 = w2 = w3 = w4 = 0.; + x_low = x_high = y_low = y_high = -1; + return; + } + + if (y < 0) { + y = 0; + } + + if (x < 0) { + x = 0; + } + + y_low = (int)y; + x_low = (int)x; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. 
- lx; + + // reference in forward + // T v1 = input[y_low * width + x_low]; + // T v2 = input[y_low * width + x_high]; + // T v3 = input[y_high * width + x_low]; + // T v4 = input[y_high * width + x_high]; + // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + return; +} + +template +inline void add(T* address, const T& val) { + *address += val; +} + +} // namespace + +template +void ROIAlignRotatedForward( + const int nthreads, + const T* input, + const T& spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + const T* rois, + T* output) { + int n_rois = nthreads / channels / pooled_width / pooled_height; + // (n, c, ph, pw) is an element in the pooled output + // can be parallelized using omp + // #pragma omp parallel for num_threads(32) + for (int n = 0; n < n_rois; n++) { + int index_n = n * channels * pooled_width * pooled_height; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + AT_ASSERTM( + roi_width >= 0 && roi_height >= 0, + "ROIs in ROIAlignRotated do not have non-negative size!"); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // We do average (integral) pooling inside a bin + const T count = std::max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. = 4 + + // we want to precalculate indices and weights shared by all channels, + // this is the key point of optimization + std::vector> pre_calc( + roi_bin_grid_h * roi_bin_grid_w * pooled_width * pooled_height); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. 
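+ // In other words, the sampling grid is first laid out in a RoI-centered frame
+ // spanning [-roi_height/2, roi_height/2] x [-roi_width/2, roi_width/2]; the
+ // rotation and translation into image coordinates happen inside
+ // pre_calc_for_bilinear_interpolate (called just below).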
+ T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + pre_calc_for_bilinear_interpolate( + height, + width, + pooled_height, + pooled_width, + roi_bin_grid_h, + roi_bin_grid_w, + roi_start_h, + roi_start_w, + bin_size_h, + bin_size_w, + roi_bin_grid_h, + roi_bin_grid_w, + roi_center_h, + roi_center_w, + cos_theta, + sin_theta, + pre_calc); + + for (int c = 0; c < channels; c++) { + int index_n_c = index_n + c * pooled_width * pooled_height; + const T* offset_input = + input + (roi_batch_ind * channels + c) * height * width; + int pre_calc_index = 0; + + for (int ph = 0; ph < pooled_height; ph++) { + for (int pw = 0; pw < pooled_width; pw++) { + int index = index_n_c + ph * pooled_width + pw; + + T output_val = 0.; + for (int iy = 0; iy < roi_bin_grid_h; iy++) { + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + PreCalc pc = pre_calc[pre_calc_index]; + output_val += pc.w1 * offset_input[pc.pos1] + + pc.w2 * offset_input[pc.pos2] + + pc.w3 * offset_input[pc.pos3] + pc.w4 * offset_input[pc.pos4]; + + pre_calc_index += 1; + } + } + output_val /= count; + + output[index] = output_val; + } // for pw + } // for ph + } // for c + } // for n +} + +template +void ROIAlignRotatedBackward( + const int nthreads, + // may not be contiguous. should index using n_stride, etc + const T* grad_output, + const T& spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + T* grad_input, + const T* rois, + const int n_stride, + const int c_stride, + const int h_stride, + const int w_stride) { + for (int index = 0; index < nthreads; index++) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + AT_ASSERTM( + roi_width >= 0 && roi_height >= 0, + "ROIs in ROIAlignRotated do not have non-negative size!"); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + T* offset_grad_input = + grad_input + ((roi_batch_ind * channels + c) * height * width); + + int output_offset = n * n_stride + c * c_stride; + const T* offset_grad_output = grad_output + output_offset; + const T grad_output_this_bin = + offset_grad_output[ph * h_stride + pw * w_stride]; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. 
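+ // Summary of the backward pass below: for each pooled element, the incoming
+ // gradient grad_output_this_bin is split evenly over the
+ // roi_bin_grid_h x roi_bin_grid_w samples of its bin; for every sample,
+ // bilinear_interpolate_gradient recovers the same weights w1..w4 used in the
+ // forward pass, and the contribution grad_output_this_bin * w_k / count is
+ // accumulated into the four neighboring input pixels (a plain add() is
+ // sufficient here because this CPU path is single threaded).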
+ T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + // We do average (integral) pooling inside a bin + const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. = 4 + + for (int iy = 0; iy < roi_bin_grid_h; iy++) { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + + T w1, w2, w3, w4; + int x_low, x_high, y_low, y_high; + + bilinear_interpolate_gradient( + height, width, y, x, w1, w2, w3, w4, x_low, x_high, y_low, y_high); + + T g1 = grad_output_this_bin * w1 / count; + T g2 = grad_output_this_bin * w2 / count; + T g3 = grad_output_this_bin * w3 / count; + T g4 = grad_output_this_bin * w4 / count; + + if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) { + // atomic add is not needed for now since it is single threaded + add(offset_grad_input + y_low * width + x_low, static_cast(g1)); + add(offset_grad_input + y_low * width + x_high, static_cast(g2)); + add(offset_grad_input + y_high * width + x_low, static_cast(g3)); + add(offset_grad_input + y_high * width + x_high, static_cast(g4)); + } // if + } // ix + } // iy + } // for +} // ROIAlignRotatedBackward + +at::Tensor ROIAlignRotated_forward_cpu( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio) { + AT_ASSERTM(input.device().is_cpu(), "input must be a CPU tensor"); + AT_ASSERTM(rois.device().is_cpu(), "rois must be a CPU tensor"); + + at::TensorArg input_t{input, "input", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlign_forward_cpu"; + at::checkAllSameType(c, {input_t, rois_t}); + + auto num_rois = rois.size(0); + auto channels = input.size(1); + auto height = input.size(2); + auto width = input.size(3); + + at::Tensor output = at::zeros( + {num_rois, channels, pooled_height, pooled_width}, input.options()); + + auto output_size = num_rois * pooled_height * pooled_width * channels; + + if (output.numel() == 0) { + return output; + } + + auto input_ = input.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + input.scalar_type(), "ROIAlignRotated_forward", [&] { + ROIAlignRotatedForward( + output_size, + input_.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + rois_.data_ptr(), + output.data_ptr()); + }); + return output; +} + +at::Tensor ROIAlignRotated_backward_cpu( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio) { + AT_ASSERTM(grad.device().is_cpu(), "grad must be a CPU tensor"); + AT_ASSERTM(rois.device().is_cpu(), "rois must be a CPU tensor"); + + at::TensorArg grad_t{grad, "grad", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlignRotated_backward_cpu"; + at::checkAllSameType(c, {grad_t, rois_t}); + + at::Tensor grad_input = + at::zeros({batch_size, channels, height, width}, grad.options()); + + // handle possibly empty gradients + if (grad.numel() == 0) { + return 
grad_input; + } + + // get stride values to ensure indexing into gradients is correct. + int n_stride = grad.stride(0); + int c_stride = grad.stride(1); + int h_stride = grad.stride(2); + int w_stride = grad.stride(3); + + auto rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + grad.scalar_type(), "ROIAlignRotated_forward", [&] { + ROIAlignRotatedBackward( + grad.numel(), + grad.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + grad_input.data_ptr(), + rois_.data_ptr(), + n_stride, + c_stride, + h_stride, + w_stride); + }); + return grad_input; +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cuda.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cuda.cu new file mode 100644 index 0000000000000000000000000000000000000000..9c376fc6973b75b34967faf870a9f85a3ee430be --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cuda.cu @@ -0,0 +1,443 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include +#include +#include +#include + +// TODO make it in a common file +#define CUDA_1D_KERNEL_LOOP(i, n) \ + for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n; \ + i += blockDim.x * gridDim.x) + +// Note: this implementation originates from the Caffe2 ROIAlignRotated Op +// and PyTorch ROIAlign (non-rotated) Op implementations. +// The key difference between this implementation and those ones is +// we don't do "legacy offset" in this version, as there aren't many previous +// works, if any, using the "legacy" ROIAlignRotated Op. +// This would make the interface a bit cleaner. + +namespace detectron2 { + +namespace { + +template +__device__ T bilinear_interpolate( + const T* input, + const int height, + const int width, + T y, + T x) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + return 0; + } + + if (y < 0) { + y = 0; + } + + if (x < 0) { + x = 0; + } + + int y_low = (int)y; + int x_low = (int)x; + int y_high; + int x_high; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. 
- lx; + // do bilinear interpolation + T v1 = input[y_low * width + x_low]; + T v2 = input[y_low * width + x_high]; + T v3 = input[y_high * width + x_low]; + T v4 = input[y_high * width + x_high]; + T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + return val; +} + +template +__device__ void bilinear_interpolate_gradient( + const int height, + const int width, + T y, + T x, + T& w1, + T& w2, + T& w3, + T& w4, + int& x_low, + int& x_high, + int& y_low, + int& y_high) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + w1 = w2 = w3 = w4 = 0.; + x_low = x_high = y_low = y_high = -1; + return; + } + + if (y < 0) { + y = 0; + } + + if (x < 0) { + x = 0; + } + + y_low = (int)y; + x_low = (int)x; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. - lx; + + // reference in forward + // T v1 = input[y_low * width + x_low]; + // T v2 = input[y_low * width + x_high]; + // T v3 = input[y_high * width + x_low]; + // T v4 = input[y_high * width + x_high]; + // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + return; +} + +} // namespace + +template +__global__ void RoIAlignRotatedForward( + const int nthreads, + const T* input, + const T spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + const T* rois, + T* top_data) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + const T* offset_input = + input + (roi_batch_ind * channels + c) * height * width; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. + T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + // We do average (inte gral) pooling inside a bin + const T count = max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. 
= 4 + + T output_val = 0.; + for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1 + { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + + T val = bilinear_interpolate(offset_input, height, width, y, x); + output_val += val; + } + } + output_val /= count; + + top_data[index] = output_val; + } +} + +template +__global__ void RoIAlignRotatedBackwardFeature( + const int nthreads, + const T* top_diff, + const int num_rois, + const T spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + T* bottom_diff, + const T* rois) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + T* offset_bottom_diff = + bottom_diff + (roi_batch_ind * channels + c) * height * width; + + int top_offset = (n * channels + c) * pooled_height * pooled_width; + const T* offset_top_diff = top_diff + top_offset; + const T top_diff_this_bin = offset_top_diff[ph * pooled_width + pw]; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. + T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + // We do average (integral) pooling inside a bin + const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. 
= 4 + + for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1 + { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + + T w1, w2, w3, w4; + int x_low, x_high, y_low, y_high; + + bilinear_interpolate_gradient( + height, width, y, x, w1, w2, w3, w4, x_low, x_high, y_low, y_high); + + T g1 = top_diff_this_bin * w1 / count; + T g2 = top_diff_this_bin * w2 / count; + T g3 = top_diff_this_bin * w3 / count; + T g4 = top_diff_this_bin * w4 / count; + + if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) { + atomicAdd( + offset_bottom_diff + y_low * width + x_low, static_cast(g1)); + atomicAdd( + offset_bottom_diff + y_low * width + x_high, static_cast(g2)); + atomicAdd( + offset_bottom_diff + y_high * width + x_low, static_cast(g3)); + atomicAdd( + offset_bottom_diff + y_high * width + x_high, static_cast(g4)); + } // if + } // ix + } // iy + } // CUDA_1D_KERNEL_LOOP +} // RoIAlignRotatedBackward + +at::Tensor ROIAlignRotated_forward_cuda( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio) { + AT_ASSERTM(input.device().is_cuda(), "input must be a CUDA tensor"); + AT_ASSERTM(rois.device().is_cuda(), "rois must be a CUDA tensor"); + at::TensorArg input_t{input, "input", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlignRotated_forward_cuda"; + at::checkAllSameGPU(c, {input_t, rois_t}); + at::checkAllSameType(c, {input_t, rois_t}); + at::cuda::CUDAGuard device_guard(input.device()); + + auto num_rois = rois.size(0); + auto channels = input.size(1); + auto height = input.size(2); + auto width = input.size(3); + + auto output = at::empty( + {num_rois, channels, pooled_height, pooled_width}, input.options()); + auto output_size = num_rois * pooled_height * pooled_width * channels; + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + dim3 grid(std::min( + at::cuda::ATenCeilDiv( + static_cast(output_size), static_cast(512)), + static_cast(4096))); + dim3 block(512); + + if (output.numel() == 0) { + AT_CUDA_CHECK(cudaGetLastError()); + return output; + } + + auto input_ = input.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES( + input.scalar_type(), "ROIAlignRotated_forward", [&] { + RoIAlignRotatedForward<<>>( + output_size, + input_.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + rois_.data_ptr(), + output.data_ptr()); + }); + cudaDeviceSynchronize(); + AT_CUDA_CHECK(cudaGetLastError()); + return output; +} + +// TODO remove the dependency on input and use instead its sizes -> save memory +at::Tensor ROIAlignRotated_backward_cuda( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio) { + AT_ASSERTM(grad.device().is_cuda(), "grad must be a CUDA tensor"); + AT_ASSERTM(rois.device().is_cuda(), "rois must be a CUDA tensor"); + + at::TensorArg grad_t{grad, "grad", 1}, 
rois_t{rois, "rois", 2}; + at::CheckedFrom c = "ROIAlign_backward_cuda"; + at::checkAllSameGPU(c, {grad_t, rois_t}); + at::checkAllSameType(c, {grad_t, rois_t}); + at::cuda::CUDAGuard device_guard(grad.device()); + + auto num_rois = rois.size(0); + auto grad_input = + at::zeros({batch_size, channels, height, width}, grad.options()); + + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + dim3 grid(std::min( + at::cuda::ATenCeilDiv( + static_cast(grad.numel()), static_cast(512)), + static_cast(4096))); + dim3 block(512); + + // handle possibly empty gradients + if (grad.numel() == 0) { + AT_CUDA_CHECK(cudaGetLastError()); + return grad_input; + } + + auto grad_ = grad.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES( + grad.scalar_type(), "ROIAlignRotated_backward", [&] { + RoIAlignRotatedBackwardFeature<<>>( + grad.numel(), + grad_.data_ptr(), + num_rois, + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + grad_input.data_ptr(), + rois_.data_ptr()); + }); + AT_CUDA_CHECK(cudaGetLastError()); + return grad_input; +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated.h b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated.h new file mode 100644 index 0000000000000000000000000000000000000000..7c389c6cbdbefdfb623296b0918c27c634d621bb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated.h @@ -0,0 +1,35 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#pragma once +#include + +namespace detectron2 { + +at::Tensor box_iou_rotated_cpu( + const at::Tensor& boxes1, + const at::Tensor& boxes2); + +#ifdef WITH_CUDA +at::Tensor box_iou_rotated_cuda( + const at::Tensor& boxes1, + const at::Tensor& boxes2); +#endif + +// Interface for Python +// inline is needed to prevent multiple function definitions when this header is +// included by different cpps +inline at::Tensor box_iou_rotated( + const at::Tensor& boxes1, + const at::Tensor& boxes2) { + assert(boxes1.device().is_cuda() == boxes2.device().is_cuda()); + if (boxes1.device().is_cuda()) { +#ifdef WITH_CUDA + return box_iou_rotated_cuda(boxes1.contiguous(), boxes2.contiguous()); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + + return box_iou_rotated_cpu(boxes1.contiguous(), boxes2.contiguous()); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cpu.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cpu.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f2b02d171077d96fcaf29b585fa6a678af1f2842 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cpu.cpp @@ -0,0 +1,39 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +#include "box_iou_rotated.h" +#include "box_iou_rotated_utils.h" + +namespace detectron2 { + +template +void box_iou_rotated_cpu_kernel( + const at::Tensor& boxes1, + const at::Tensor& boxes2, + at::Tensor& ious) { + auto num_boxes1 = boxes1.size(0); + auto num_boxes2 = boxes2.size(0); + + for (int i = 0; i < num_boxes1; i++) { + for (int j = 0; j < num_boxes2; j++) { + ious[i * num_boxes2 + j] = single_box_iou_rotated( + boxes1[i].data_ptr(), boxes2[j].data_ptr()); + } + } +} + +at::Tensor box_iou_rotated_cpu( + // input must be contiguous: + const at::Tensor& boxes1, + const at::Tensor& boxes2) { + auto num_boxes1 = boxes1.size(0); + auto num_boxes2 = boxes2.size(0); + at::Tensor ious = + at::empty({num_boxes1 * num_boxes2}, boxes1.options().dtype(at::kFloat)); + + box_iou_rotated_cpu_kernel(boxes1, boxes2, ious); + + // reshape from 1d array to 2d array + auto shape = std::vector{num_boxes1, num_boxes2}; + return ious.reshape(shape); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cuda.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cuda.cu new file mode 100644 index 0000000000000000000000000000000000000000..e3403c11796cb313771b8b6350c793b9fbdfbcaa --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cuda.cu @@ -0,0 +1,130 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include +#include +#include +#include +#include "box_iou_rotated_utils.h" + +namespace detectron2 { + +// 2D block with 32 * 16 = 512 threads per block +const int BLOCK_DIM_X = 32; +const int BLOCK_DIM_Y = 16; + +template +__global__ void box_iou_rotated_cuda_kernel( + const int n_boxes1, + const int n_boxes2, + const T* dev_boxes1, + const T* dev_boxes2, + T* dev_ious) { + const int row_start = blockIdx.x * blockDim.x; + const int col_start = blockIdx.y * blockDim.y; + + const int row_size = min(n_boxes1 - row_start, blockDim.x); + const int col_size = min(n_boxes2 - col_start, blockDim.y); + + __shared__ float block_boxes1[BLOCK_DIM_X * 5]; + __shared__ float block_boxes2[BLOCK_DIM_Y * 5]; + + // It's safe to copy using threadIdx.x since BLOCK_DIM_X >= BLOCK_DIM_Y + if (threadIdx.x < row_size && threadIdx.y == 0) { + block_boxes1[threadIdx.x * 5 + 0] = + dev_boxes1[(row_start + threadIdx.x) * 5 + 0]; + block_boxes1[threadIdx.x * 5 + 1] = + dev_boxes1[(row_start + threadIdx.x) * 5 + 1]; + block_boxes1[threadIdx.x * 5 + 2] = + dev_boxes1[(row_start + threadIdx.x) * 5 + 2]; + block_boxes1[threadIdx.x * 5 + 3] = + dev_boxes1[(row_start + threadIdx.x) * 5 + 3]; + block_boxes1[threadIdx.x * 5 + 4] = + dev_boxes1[(row_start + threadIdx.x) * 5 + 4]; + } + + if (threadIdx.x < col_size && threadIdx.y == 0) { + block_boxes2[threadIdx.x * 5 + 0] = + dev_boxes2[(col_start + threadIdx.x) * 5 + 0]; + block_boxes2[threadIdx.x * 5 + 1] = + dev_boxes2[(col_start + threadIdx.x) * 5 + 1]; + block_boxes2[threadIdx.x * 5 + 2] = + dev_boxes2[(col_start + threadIdx.x) * 5 + 2]; + block_boxes2[threadIdx.x * 5 + 3] = + dev_boxes2[(col_start + threadIdx.x) * 5 + 3]; + block_boxes2[threadIdx.x * 5 + 4] = + dev_boxes2[(col_start + threadIdx.x) * 5 + 4]; + } + __syncthreads(); + + if (threadIdx.x < row_size && threadIdx.y < col_size) { + int offset = (row_start + threadIdx.x) * n_boxes2 + col_start + threadIdx.y; + 
dev_ious[offset] = single_box_iou_rotated( + block_boxes1 + threadIdx.x * 5, block_boxes2 + threadIdx.y * 5); + } +} + +at::Tensor box_iou_rotated_cuda( + // input must be contiguous + const at::Tensor& boxes1, + const at::Tensor& boxes2) { + using scalar_t = float; + AT_ASSERTM( + boxes1.scalar_type() == at::kFloat, "boxes1 must be a float tensor"); + AT_ASSERTM( + boxes2.scalar_type() == at::kFloat, "boxes2 must be a float tensor"); + AT_ASSERTM(boxes1.is_cuda(), "boxes1 must be a CUDA tensor"); + AT_ASSERTM(boxes2.is_cuda(), "boxes2 must be a CUDA tensor"); + at::cuda::CUDAGuard device_guard(boxes1.device()); + + auto num_boxes1 = boxes1.size(0); + auto num_boxes2 = boxes2.size(0); + + at::Tensor ious = + at::empty({num_boxes1 * num_boxes2}, boxes1.options().dtype(at::kFloat)); + + bool transpose = false; + if (num_boxes1 > 0 && num_boxes2 > 0) { + scalar_t *data1 = boxes1.data_ptr(), + *data2 = boxes2.data_ptr(); + + if (num_boxes2 > 65535 * BLOCK_DIM_Y) { + AT_ASSERTM( + num_boxes1 <= 65535 * BLOCK_DIM_Y, + "Too many boxes for box_iou_rotated_cuda!"); + // x dim is allowed to be large, but y dim cannot, + // so we transpose the two to avoid "invalid configuration argument" + // error. We assume one of them is small. Otherwise the result is hard to + // fit in memory anyway. + std::swap(num_boxes1, num_boxes2); + std::swap(data1, data2); + transpose = true; + } + + const int blocks_x = + at::cuda::ATenCeilDiv(static_cast(num_boxes1), BLOCK_DIM_X); + const int blocks_y = + at::cuda::ATenCeilDiv(static_cast(num_boxes2), BLOCK_DIM_Y); + + dim3 blocks(blocks_x, blocks_y); + dim3 threads(BLOCK_DIM_X, BLOCK_DIM_Y); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + box_iou_rotated_cuda_kernel<<>>( + num_boxes1, + num_boxes2, + data1, + data2, + (scalar_t*)ious.data_ptr()); + + AT_CUDA_CHECK(cudaGetLastError()); + } + + // reshape from 1d array to 2d array + auto shape = std::vector{num_boxes1, num_boxes2}; + if (transpose) { + return ious.view(shape).t(); + } else { + return ious.view(shape); + } +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_utils.h b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..d8757ec376e8703e1edc5f76bf5ef214620bd69f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_utils.h @@ -0,0 +1,363 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +#pragma once + +#include +#include + +#ifdef __CUDACC__ +// Designates functions callable from the host (CPU) and the device (GPU) +#define HOST_DEVICE __host__ __device__ +#define HOST_DEVICE_INLINE HOST_DEVICE __forceinline__ +#else +#include +#define HOST_DEVICE +#define HOST_DEVICE_INLINE HOST_DEVICE inline +#endif + +namespace detectron2 { + +namespace { + +template +struct RotatedBox { + T x_ctr, y_ctr, w, h, a; +}; + +template +struct Point { + T x, y; + HOST_DEVICE_INLINE Point(const T& px = 0, const T& py = 0) : x(px), y(py) {} + HOST_DEVICE_INLINE Point operator+(const Point& p) const { + return Point(x + p.x, y + p.y); + } + HOST_DEVICE_INLINE Point& operator+=(const Point& p) { + x += p.x; + y += p.y; + return *this; + } + HOST_DEVICE_INLINE Point operator-(const Point& p) const { + return Point(x - p.x, y - p.y); + } + HOST_DEVICE_INLINE Point operator*(const T coeff) const { + return Point(x * coeff, y * coeff); + } +}; + +template +HOST_DEVICE_INLINE T dot_2d(const Point& A, const Point& B) { + return A.x * B.x + A.y * B.y; +} + +// R: result type. can be different from input type +template +HOST_DEVICE_INLINE R cross_2d(const Point& A, const Point& B) { + return static_cast(A.x) * static_cast(B.y) - + static_cast(B.x) * static_cast(A.y); +} + +template +HOST_DEVICE_INLINE void get_rotated_vertices( + const RotatedBox& box, + Point (&pts)[4]) { + // M_PI / 180. == 0.01745329251 + double theta = box.a * 0.01745329251; + T cosTheta2 = (T)cos(theta) * 0.5f; + T sinTheta2 = (T)sin(theta) * 0.5f; + + // y: top --> down; x: left --> right + pts[0].x = box.x_ctr + sinTheta2 * box.h + cosTheta2 * box.w; + pts[0].y = box.y_ctr + cosTheta2 * box.h - sinTheta2 * box.w; + pts[1].x = box.x_ctr - sinTheta2 * box.h + cosTheta2 * box.w; + pts[1].y = box.y_ctr - cosTheta2 * box.h - sinTheta2 * box.w; + pts[2].x = 2 * box.x_ctr - pts[0].x; + pts[2].y = 2 * box.y_ctr - pts[0].y; + pts[3].x = 2 * box.x_ctr - pts[1].x; + pts[3].y = 2 * box.y_ctr - pts[1].y; +} + +template +HOST_DEVICE_INLINE int get_intersection_points( + const Point (&pts1)[4], + const Point (&pts2)[4], + Point (&intersections)[24]) { + // Line vector + // A line from p1 to p2 is: p1 + (p2-p1)*t, t=[0,1] + Point vec1[4], vec2[4]; + for (int i = 0; i < 4; i++) { + vec1[i] = pts1[(i + 1) % 4] - pts1[i]; + vec2[i] = pts2[(i + 1) % 4] - pts2[i]; + } + + // Line test - test all line combos for intersection + int num = 0; // number of intersections + for (int i = 0; i < 4; i++) { + for (int j = 0; j < 4; j++) { + // Solve for 2x2 Ax=b + T det = cross_2d(vec2[j], vec1[i]); + + // This takes care of parallel lines + if (fabs(det) <= 1e-14) { + continue; + } + + auto vec12 = pts2[j] - pts1[i]; + + T t1 = cross_2d(vec2[j], vec12) / det; + T t2 = cross_2d(vec1[i], vec12) / det; + + if (t1 >= 0.0f && t1 <= 1.0f && t2 >= 0.0f && t2 <= 1.0f) { + intersections[num++] = pts1[i] + vec1[i] * t1; + } + } + } + + // Check for vertices of rect1 inside rect2 + { + const auto& AB = vec2[0]; + const auto& DA = vec2[3]; + auto ABdotAB = dot_2d(AB, AB); + auto ADdotAD = dot_2d(DA, DA); + for (int i = 0; i < 4; i++) { + // assume ABCD is the rectangle, and P is the point to be judged + // P is inside ABCD iff. 
P's projection on AB lies within AB + // and P's projection on AD lies within AD + + auto AP = pts1[i] - pts2[0]; + + auto APdotAB = dot_2d(AP, AB); + auto APdotAD = -dot_2d(AP, DA); + + if ((APdotAB >= 0) && (APdotAD >= 0) && (APdotAB <= ABdotAB) && + (APdotAD <= ADdotAD)) { + intersections[num++] = pts1[i]; + } + } + } + + // Reverse the check - check for vertices of rect2 inside rect1 + { + const auto& AB = vec1[0]; + const auto& DA = vec1[3]; + auto ABdotAB = dot_2d(AB, AB); + auto ADdotAD = dot_2d(DA, DA); + for (int i = 0; i < 4; i++) { + auto AP = pts2[i] - pts1[0]; + + auto APdotAB = dot_2d(AP, AB); + auto APdotAD = -dot_2d(AP, DA); + + if ((APdotAB >= 0) && (APdotAD >= 0) && (APdotAB <= ABdotAB) && + (APdotAD <= ADdotAD)) { + intersections[num++] = pts2[i]; + } + } + } + + return num; +} + +template +HOST_DEVICE_INLINE int convex_hull_graham( + const Point (&p)[24], + const int& num_in, + Point (&q)[24], + bool shift_to_zero = false) { + assert(num_in >= 2); + + // Step 1: + // Find point with minimum y + // if more than 1 points have the same minimum y, + // pick the one with the minimum x. + int t = 0; + for (int i = 1; i < num_in; i++) { + if (p[i].y < p[t].y || (p[i].y == p[t].y && p[i].x < p[t].x)) { + t = i; + } + } + auto& start = p[t]; // starting point + + // Step 2: + // Subtract starting point from every points (for sorting in the next step) + for (int i = 0; i < num_in; i++) { + q[i] = p[i] - start; + } + + // Swap the starting point to position 0 + auto tmp = q[0]; + q[0] = q[t]; + q[t] = tmp; + + // Step 3: + // Sort point 1 ~ num_in according to their relative cross-product values + // (essentially sorting according to angles) + // If the angles are the same, sort according to their distance to origin + T dist[24]; +#ifdef __CUDACC__ + // compute distance to origin before sort, and sort them together with the + // points + for (int i = 0; i < num_in; i++) { + dist[i] = dot_2d(q[i], q[i]); + } + + // CUDA version + // In the future, we can potentially use thrust + // for sorting here to improve speed (though not guaranteed) + for (int i = 1; i < num_in - 1; i++) { + for (int j = i + 1; j < num_in; j++) { + T crossProduct = cross_2d(q[i], q[j]); + if ((crossProduct < -1e-6) || + (fabs(crossProduct) < 1e-6 && dist[i] > dist[j])) { + auto q_tmp = q[i]; + q[i] = q[j]; + q[j] = q_tmp; + auto dist_tmp = dist[i]; + dist[i] = dist[j]; + dist[j] = dist_tmp; + } + } + } +#else + // CPU version + std::sort( + q + 1, q + num_in, [](const Point& A, const Point& B) -> bool { + T temp = cross_2d(A, B); + if (fabs(temp) < 1e-6) { + return dot_2d(A, A) < dot_2d(B, B); + } else { + return temp > 0; + } + }); + // compute distance to origin after sort, since the points are now different. + for (int i = 0; i < num_in; i++) { + dist[i] = dot_2d(q[i], q[i]); + } +#endif + + // Step 4: + // Make sure there are at least 2 points (that don't overlap with each other) + // in the stack + int k; // index of the non-overlapped second point + for (k = 1; k < num_in; k++) { + if (dist[k] > 1e-8) { + break; + } + } + if (k == num_in) { + // We reach the end, which means the convex hull is just one point + q[0] = p[t]; + return 1; + } + q[1] = q[k]; + int m = 2; // 2 points in the stack + // Step 5: + // Finally we can start the scanning process. 
+ // When a non-convex relationship between the 3 points is found + // (either concave shape or duplicated points), + // we pop the previous point from the stack + // until the 3-point relationship is convex again, or + // until the stack only contains two points + for (int i = k + 1; i < num_in; i++) { + while (m > 1) { + auto q1 = q[i] - q[m - 2], q2 = q[m - 1] - q[m - 2]; + // cross_2d() uses FMA and therefore computes round(round(q1.x*q2.y) - + // q2.x*q1.y) So it may not return 0 even when q1==q2. Therefore we + // compare round(q1.x*q2.y) and round(q2.x*q1.y) directly. (round means + // round to nearest floating point). + if (q1.x * q2.y >= q2.x * q1.y) + m--; + else + break; + } + // Using double also helps, but float can solve the issue for now. + // while (m > 1 && cross_2d(q[i] - q[m - 2], q[m - 1] - q[m - 2]) + // >= 0) { + // m--; + // } + q[m++] = q[i]; + } + + // Step 6 (Optional): + // In general sense we need the original coordinates, so we + // need to shift the points back (reverting Step 2) + // But if we're only interested in getting the area/perimeter of the shape + // We can simply return. + if (!shift_to_zero) { + for (int i = 0; i < m; i++) { + q[i] += start; + } + } + + return m; +} + +template +HOST_DEVICE_INLINE T polygon_area(const Point (&q)[24], const int& m) { + if (m <= 2) { + return 0; + } + + T area = 0; + for (int i = 1; i < m - 1; i++) { + area += fabs(cross_2d(q[i] - q[0], q[i + 1] - q[0])); + } + + return area / 2.0; +} + +template +HOST_DEVICE_INLINE T rotated_boxes_intersection( + const RotatedBox& box1, + const RotatedBox& box2) { + // There are up to 4 x 4 + 4 + 4 = 24 intersections (including dups) returned + // from rotated_rect_intersection_pts + Point intersectPts[24], orderedPts[24]; + + Point pts1[4]; + Point pts2[4]; + get_rotated_vertices(box1, pts1); + get_rotated_vertices(box2, pts2); + + int num = get_intersection_points(pts1, pts2, intersectPts); + + if (num <= 2) { + return 0.0; + } + + // Convex Hull to order the intersection points in clockwise order and find + // the contour area. 
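+ // Summary of the rotated-IoU pipeline in this header: get_rotated_vertices()
+ // produces the 4 corners of each box, get_intersection_points() collects up
+ // to 24 candidate points (edge-edge intersections plus corners of one box
+ // lying inside the other), convex_hull_graham() orders them, and
+ // polygon_area() sums the triangle-fan terms
+ //   |cross_2d(q[i] - q[0], q[i + 1] - q[0])| / 2
+ // to obtain the intersection area; single_box_iou_rotated() then returns
+ //   intersection / (area1 + area2 - intersection).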
+ int num_convex = convex_hull_graham(intersectPts, num, orderedPts, true); + return polygon_area(orderedPts, num_convex); +} + +} // namespace + +template +HOST_DEVICE_INLINE T +single_box_iou_rotated(T const* const box1_raw, T const* const box2_raw) { + // shift center to the middle point to achieve higher precision in result + RotatedBox box1, box2; + auto center_shift_x = (box1_raw[0] + box2_raw[0]) / 2.0; + auto center_shift_y = (box1_raw[1] + box2_raw[1]) / 2.0; + box1.x_ctr = box1_raw[0] - center_shift_x; + box1.y_ctr = box1_raw[1] - center_shift_y; + box1.w = box1_raw[2]; + box1.h = box1_raw[3]; + box1.a = box1_raw[4]; + box2.x_ctr = box2_raw[0] - center_shift_x; + box2.y_ctr = box2_raw[1] - center_shift_y; + box2.w = box2_raw[2]; + box2.h = box2_raw[3]; + box2.a = box2_raw[4]; + + T area1 = box1.w * box1.h; + T area2 = box2.w * box2.h; + if (area1 < 1e-14 || area2 < 1e-14) { + return 0.f; + } + + T intersection = rotated_boxes_intersection(box1, box2); + T iou = intersection / (area1 + area2 - intersection); + return iou; +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/cuda_version.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/cuda_version.cu new file mode 100644 index 0000000000000000000000000000000000000000..af088e7572f6f27b9d653b4d7178f4e03de6befc --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/cuda_version.cu @@ -0,0 +1,9 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +#include + +namespace detectron2 { +int get_cudart_version() { + return CUDART_VERSION; +} +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv.h b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv.h new file mode 100644 index 0000000000000000000000000000000000000000..49ccd868ace8fd79f6fcbde6fe41f2b95873c414 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv.h @@ -0,0 +1,377 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +#pragma once +#include + +namespace detectron2 { + +#ifdef WITH_CUDA +int deform_conv_forward_cuda( + at::Tensor input, + at::Tensor weight, + at::Tensor offset, + at::Tensor output, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step); + +int deform_conv_backward_input_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradInput, + at::Tensor gradOffset, + at::Tensor weight, + at::Tensor columns, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step); + +int deform_conv_backward_parameters_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradWeight, // at::Tensor gradBias, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + float scale, + int im2col_step); + +void modulated_deform_conv_cuda_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor output, + at::Tensor columns, + int kernel_h, + int kernel_w, + const int stride_h, + const int stride_w, + const int pad_h, + const int pad_w, + const int dilation_h, + const int dilation_w, + const int group, + const int deformable_group, + const bool with_bias); + +void modulated_deform_conv_cuda_backward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor columns, + at::Tensor grad_input, + at::Tensor grad_weight, + at::Tensor grad_bias, + at::Tensor grad_offset, + at::Tensor grad_mask, + at::Tensor grad_output, + int kernel_h, + int kernel_w, + int stride_h, + int stride_w, + int pad_h, + int pad_w, + int dilation_h, + int dilation_w, + int group, + int deformable_group, + const bool with_bias); + +#endif + +inline int deform_conv_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor offset, + at::Tensor output, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + if (input.is_cuda()) { +#ifdef WITH_CUDA + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return deform_conv_forward_cuda( + input, + weight, + offset, + output, + columns, + ones, + kW, + kH, + dW, + dH, + padW, + padH, + dilationW, + dilationH, + group, + deformable_group, + im2col_step); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + AT_ERROR("Not implemented on the CPU"); +} + +inline int deform_conv_backward_input( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradInput, + at::Tensor gradOffset, + at::Tensor weight, + at::Tensor columns, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + if (gradOutput.is_cuda()) { +#ifdef WITH_CUDA + TORCH_CHECK(input.is_cuda(), "input tensor is not on GPU!"); + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return deform_conv_backward_input_cuda( + input, + 
offset, + gradOutput, + gradInput, + gradOffset, + weight, + columns, + kW, + kH, + dW, + dH, + padW, + padH, + dilationW, + dilationH, + group, + deformable_group, + im2col_step); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + AT_ERROR("Not implemented on the CPU"); +} + +inline int deform_conv_backward_filter( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradWeight, // at::Tensor gradBias, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + float scale, + int im2col_step) { + if (gradOutput.is_cuda()) { +#ifdef WITH_CUDA + TORCH_CHECK(input.is_cuda(), "input tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return deform_conv_backward_parameters_cuda( + input, + offset, + gradOutput, + gradWeight, + columns, + ones, + kW, + kH, + dW, + dH, + padW, + padH, + dilationW, + dilationH, + group, + deformable_group, + scale, + im2col_step); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + AT_ERROR("Not implemented on the CPU"); +} + +inline void modulated_deform_conv_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor output, + at::Tensor columns, + int kernel_h, + int kernel_w, + const int stride_h, + const int stride_w, + const int pad_h, + const int pad_w, + const int dilation_h, + const int dilation_w, + const int group, + const int deformable_group, + const bool with_bias) { + if (input.is_cuda()) { +#ifdef WITH_CUDA + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(bias.is_cuda(), "bias tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return modulated_deform_conv_cuda_forward( + input, + weight, + bias, + ones, + offset, + mask, + output, + columns, + kernel_h, + kernel_w, + stride_h, + stride_w, + pad_h, + pad_w, + dilation_h, + dilation_w, + group, + deformable_group, + with_bias); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + AT_ERROR("Not implemented on the CPU"); +} + +inline void modulated_deform_conv_backward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor columns, + at::Tensor grad_input, + at::Tensor grad_weight, + at::Tensor grad_bias, + at::Tensor grad_offset, + at::Tensor grad_mask, + at::Tensor grad_output, + int kernel_h, + int kernel_w, + int stride_h, + int stride_w, + int pad_h, + int pad_w, + int dilation_h, + int dilation_w, + int group, + int deformable_group, + const bool with_bias) { + if (grad_output.is_cuda()) { +#ifdef WITH_CUDA + TORCH_CHECK(input.is_cuda(), "input tensor is not on GPU!"); + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(bias.is_cuda(), "bias tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return modulated_deform_conv_cuda_backward( + input, + weight, + bias, + ones, + offset, + mask, + columns, + grad_input, + grad_weight, + grad_bias, + grad_offset, + grad_mask, + grad_output, + kernel_h, + kernel_w, + stride_h, + stride_w, + pad_h, + pad_w, + dilation_h, + dilation_w, + group, + deformable_group, + with_bias); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + AT_ERROR("Not implemented on the CPU"); +} + +} // namespace detectron2 diff --git 
a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv_cuda.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv_cuda.cu new file mode 100644 index 0000000000000000000000000000000000000000..5376db0cc4d93e245cfc9fea0f3b5715a1f88db2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv_cuda.cu @@ -0,0 +1,1131 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +// modified from +// https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/dcn/src/deform_conv_cuda.cpp +// Original license: Apache 2.0 + +// modify from +// https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/blob/mmdetection/mmdet/ops/dcn/src/deform_conv_cuda.c +// Original license: Apache 2.0 + +#include + +#include "deform_conv.h" + +#include +#include + +namespace detectron2 { + +void deformable_im2col( + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor data_col); + +void deformable_col2im( + const at::Tensor data_col, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor grad_im); + +void deformable_col2im_coord( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor grad_offset); + +void modulated_deformable_im2col_cuda( + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor data_col); + +void modulated_deformable_col2im_cuda( + const at::Tensor data_col, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_im); + +void modulated_deformable_col2im_coord_cuda( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int 
kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_offset, + at::Tensor grad_mask); + +void shape_check( + at::Tensor input, + at::Tensor offset, + at::Tensor* gradOutput, + at::Tensor weight, + int kH, + int kW, + int dH, + int dW, + int padH, + int padW, + int dilationH, + int dilationW, + int group, + int deformable_group) { + TORCH_CHECK( + weight.ndimension() == 4, + "4D weight tensor (nOutputPlane,nInputPlane,kH,kW) expected, " + "but got: %s", + weight.ndimension()); + + TORCH_CHECK(weight.is_contiguous(), "weight tensor has to be contiguous"); + + TORCH_CHECK( + kW > 0 && kH > 0, + "kernel size should be greater than zero, but got kH: %d kW: %d", + kH, + kW); + + TORCH_CHECK( + (weight.size(2) == kH && weight.size(3) == kW), + "kernel size should be consistent with weight, ", + "but got kH: %d kW: %d weight.size(2): %d, weight.size(3): %d", + kH, + kW, + weight.size(2), + weight.size(3)); + + TORCH_CHECK( + dW > 0 && dH > 0, + "stride should be greater than zero, but got dH: %d dW: %d", + dH, + dW); + + TORCH_CHECK( + dilationW > 0 && dilationH > 0, + "dilation should be greater than 0, but got dilationH: %d dilationW: %d", + dilationH, + dilationW); + + int ndim = input.ndimension(); + int dimf = 0; + int dimh = 1; + int dimw = 2; + + if (ndim == 4) { + dimf++; + dimh++; + dimw++; + } + + TORCH_CHECK( + ndim == 3 || ndim == 4, + "3D or 4D input tensor expected but got: %s", + ndim); + + long nInputPlane = weight.size(1) * group; + long inputHeight = input.size(dimh); + long inputWidth = input.size(dimw); + long nOutputPlane = weight.size(0); + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + + TORCH_CHECK( + nInputPlane % deformable_group == 0, + "input channels must divide deformable group size"); + + if (outputWidth < 1 || outputHeight < 1) + AT_ERROR( + "Given input size: (%ld x %ld x %ld). " + "Calculated output size: (%ld x %ld x %ld). 
Output size is too small", + nInputPlane, + inputHeight, + inputWidth, + nOutputPlane, + outputHeight, + outputWidth); + + TORCH_CHECK( + input.size(1) == nInputPlane, + "invalid number of input planes, expected: %d, but got: %d", + nInputPlane, + input.size(1)); + + TORCH_CHECK( + (inputHeight >= kH && inputWidth >= kW), + "input image is smaller than kernel"); + + TORCH_CHECK( + (offset.size(2) == outputHeight && offset.size(3) == outputWidth), + "invalid spatial size of offset, expected height: %d width: %d, but " + "got height: %d width: %d", + outputHeight, + outputWidth, + offset.size(2), + offset.size(3)); + + TORCH_CHECK( + (offset.size(1) == deformable_group * 2 * kH * kW), + "invalid number of channels of offset"); + + if (gradOutput != NULL) { + TORCH_CHECK( + gradOutput->size(dimf) == nOutputPlane, + "invalid number of gradOutput planes, expected: %d, but got: %d", + nOutputPlane, + gradOutput->size(dimf)); + + TORCH_CHECK( + (gradOutput->size(dimh) == outputHeight && + gradOutput->size(dimw) == outputWidth), + "invalid size of gradOutput, expected height: %d width: %d , but " + "got height: %d width: %d", + outputHeight, + outputWidth, + gradOutput->size(dimh), + gradOutput->size(dimw)); + } +} + +int deform_conv_forward_cuda( + at::Tensor input, + at::Tensor weight, + at::Tensor offset, + at::Tensor output, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + // todo: resize columns to include im2col: done + // todo: add im2col_step as input + // todo: add new output buffer and transpose it to output (or directly + // transpose output) todo: possibly change data indexing because of + // parallel_imgs + + shape_check( + input, + offset, + NULL, + weight, + kH, + kW, + dH, + dW, + padH, + padW, + dilationH, + dilationW, + group, + deformable_group); + + input = input.contiguous(); + offset = offset.contiguous(); + weight = weight.contiguous(); + + int batch = 1; + if (input.ndimension() == 3) { + // Force batch + batch = 0; + input.unsqueeze_(0); + offset.unsqueeze_(0); + } + + // todo: assert batchsize dividable by im2col_step + + long batchSize = input.size(0); + long nInputPlane = input.size(1); + long inputHeight = input.size(2); + long inputWidth = input.size(3); + + long nOutputPlane = weight.size(0); + + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + + TORCH_CHECK((offset.size(0) == batchSize), "invalid batch size of offset"); + + output = output.view({batchSize / im2col_step, + im2col_step, + nOutputPlane, + outputHeight, + outputWidth}); + columns = at::zeros( + {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth}, + input.options()); + + if (ones.ndimension() != 2 || + ones.size(0) * ones.size(1) < outputHeight * outputWidth) { + ones = at::ones({outputHeight, outputWidth}, input.options()); + } + + input = input.view({batchSize / im2col_step, + im2col_step, + nInputPlane, + inputHeight, + inputWidth}); + offset = offset.view({batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + + at::Tensor output_buffer = at::zeros( + {batchSize / im2col_step, + nOutputPlane, + im2col_step * outputHeight, + outputWidth}, + output.options()); + + output_buffer = output_buffer.view({output_buffer.size(0), + group, + output_buffer.size(1) / group, + 
output_buffer.size(2), + output_buffer.size(3)}); + + for (int elt = 0; elt < batchSize / im2col_step; elt++) { + deformable_im2col( + input[elt], + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + columns); + + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + weight = weight.view({group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + + for (int g = 0; g < group; g++) { + output_buffer[elt][g] = output_buffer[elt][g] + .flatten(1) + .addmm_(weight[g].flatten(1), columns[g]) + .view_as(output_buffer[elt][g]); + } + } + + output_buffer = + output_buffer.view({output_buffer.size(0), + output_buffer.size(1) * output_buffer.size(2), + output_buffer.size(3), + output_buffer.size(4)}); + + output_buffer = output_buffer.view({batchSize / im2col_step, + nOutputPlane, + im2col_step, + outputHeight, + outputWidth}); + output_buffer.transpose_(1, 2); + output.copy_(output_buffer); + output = output.view({batchSize, nOutputPlane, outputHeight, outputWidth}); + + input = input.view({batchSize, nInputPlane, inputHeight, inputWidth}); + offset = offset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + + if (batch == 0) { + output = output.view({nOutputPlane, outputHeight, outputWidth}); + input = input.view({nInputPlane, inputHeight, inputWidth}); + offset = offset.view({offset.size(1), offset.size(2), offset.size(3)}); + } + + return 1; +} + +int deform_conv_backward_input_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradInput, + at::Tensor gradOffset, + at::Tensor weight, + at::Tensor columns, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + shape_check( + input, + offset, + &gradOutput, + weight, + kH, + kW, + dH, + dW, + padH, + padW, + dilationH, + dilationW, + group, + deformable_group); + + input = input.contiguous(); + offset = offset.contiguous(); + gradOutput = gradOutput.contiguous(); + weight = weight.contiguous(); + + int batch = 1; + + if (input.ndimension() == 3) { + // Force batch + batch = 0; + input = input.view({1, input.size(0), input.size(1), input.size(2)}); + offset = offset.view({1, offset.size(0), offset.size(1), offset.size(2)}); + gradOutput = gradOutput.view( + {1, gradOutput.size(0), gradOutput.size(1), gradOutput.size(2)}); + } + + long batchSize = input.size(0); + long nInputPlane = input.size(1); + long inputHeight = input.size(2); + long inputWidth = input.size(3); + + long nOutputPlane = weight.size(0); + + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + + TORCH_CHECK((offset.size(0) == batchSize), 3, "invalid batch size of offset"); + gradInput = gradInput.view({batchSize, nInputPlane, inputHeight, inputWidth}); + columns = at::zeros( + {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth}, + input.options()); + + // change order of grad output + gradOutput = gradOutput.view({batchSize / im2col_step, + im2col_step, + nOutputPlane, + outputHeight, + outputWidth}); + gradOutput.transpose_(1, 2); + + gradInput = gradInput.view({batchSize / im2col_step, + im2col_step, + nInputPlane, + inputHeight, + inputWidth}); + input = input.view({batchSize / im2col_step, + im2col_step, + nInputPlane, 
+ inputHeight, + inputWidth}); + gradOffset = gradOffset.view({batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + offset = offset.view({batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + + for (int elt = 0; elt < batchSize / im2col_step; elt++) { + // divide into groups + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + weight = weight.view({group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + gradOutput = gradOutput.view({gradOutput.size(0), + group, + gradOutput.size(1) / group, + gradOutput.size(2), + gradOutput.size(3), + gradOutput.size(4)}); + + for (int g = 0; g < group; g++) { + columns[g] = columns[g].addmm_( + weight[g].flatten(1).transpose(0, 1), + gradOutput[elt][g].flatten(1), + 0.0f, + 1.0f); + } + + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + gradOutput = gradOutput.view({gradOutput.size(0), + gradOutput.size(1) * gradOutput.size(2), + gradOutput.size(3), + gradOutput.size(4), + gradOutput.size(5)}); + + deformable_col2im_coord( + columns, + input[elt], + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + gradOffset[elt]); + + deformable_col2im( + columns, + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + gradInput[elt]); + } + + gradOutput.transpose_(1, 2); + gradOutput = + gradOutput.view({batchSize, nOutputPlane, outputHeight, outputWidth}); + + gradInput = gradInput.view({batchSize, nInputPlane, inputHeight, inputWidth}); + input = input.view({batchSize, nInputPlane, inputHeight, inputWidth}); + gradOffset = gradOffset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + offset = offset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + + if (batch == 0) { + gradOutput = gradOutput.view({nOutputPlane, outputHeight, outputWidth}); + input = input.view({nInputPlane, inputHeight, inputWidth}); + gradInput = gradInput.view({nInputPlane, inputHeight, inputWidth}); + offset = offset.view({offset.size(1), offset.size(2), offset.size(3)}); + gradOffset = + gradOffset.view({offset.size(1), offset.size(2), offset.size(3)}); + } + + return 1; +} + +int deform_conv_backward_parameters_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradWeight, // at::Tensor gradBias, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + float scale, + int im2col_step) { + // todo: transpose and reshape outGrad + // todo: reshape columns + // todo: add im2col_step as input + + shape_check( + input, + offset, + &gradOutput, + gradWeight, + kH, + kW, + dH, + dW, + padH, + padW, + dilationH, + dilationW, + group, + deformable_group); + + input = input.contiguous(); + offset = offset.contiguous(); + gradOutput = gradOutput.contiguous(); + + int batch = 1; + + if (input.ndimension() == 3) { + // Force batch + batch = 0; + input = input.view( + at::IntList({1, input.size(0), input.size(1), input.size(2)})); + gradOutput = gradOutput.view( + {1, gradOutput.size(0), gradOutput.size(1), gradOutput.size(2)}); + } + + long batchSize = input.size(0); + long 
nInputPlane = input.size(1); + long inputHeight = input.size(2); + long inputWidth = input.size(3); + + long nOutputPlane = gradWeight.size(0); + + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + + TORCH_CHECK((offset.size(0) == batchSize), "invalid batch size of offset"); + + columns = at::zeros( + {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth}, + input.options()); + + gradOutput = gradOutput.view({batchSize / im2col_step, + im2col_step, + nOutputPlane, + outputHeight, + outputWidth}); + gradOutput.transpose_(1, 2); + + at::Tensor gradOutputBuffer = at::zeros_like(gradOutput); + gradOutputBuffer = gradOutputBuffer.view({batchSize / im2col_step, + nOutputPlane, + im2col_step, + outputHeight, + outputWidth}); + gradOutputBuffer.copy_(gradOutput); + // gradOutput is not contiguous, so we do reshape (instead of view) next + gradOutputBuffer = gradOutputBuffer.reshape({batchSize / im2col_step, + nOutputPlane, + im2col_step * outputHeight, + outputWidth}); + + gradOutput.transpose_(1, 2); + gradOutput = + gradOutput.view({batchSize, nOutputPlane, outputHeight, outputWidth}); + + input = input.view({batchSize / im2col_step, + im2col_step, + nInputPlane, + inputHeight, + inputWidth}); + offset = offset.view({batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + + for (int elt = 0; elt < batchSize / im2col_step; elt++) { + deformable_im2col( + input[elt], + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + columns); + + // divide into group + gradOutputBuffer = gradOutputBuffer.view({gradOutputBuffer.size(0), + group, + gradOutputBuffer.size(1) / group, + gradOutputBuffer.size(2), + gradOutputBuffer.size(3)}); + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + gradWeight = gradWeight.view({group, + gradWeight.size(0) / group, + gradWeight.size(1), + gradWeight.size(2), + gradWeight.size(3)}); + + for (int g = 0; g < group; g++) { + gradWeight[g] = gradWeight[g] + .flatten(1) + .addmm_( + gradOutputBuffer[elt][g].flatten(1), + columns[g].transpose(1, 0), + 1.0, + scale) + .view_as(gradWeight[g]); + } + gradOutputBuffer = gradOutputBuffer.view( + {gradOutputBuffer.size(0), + gradOutputBuffer.size(1) * gradOutputBuffer.size(2), + gradOutputBuffer.size(3), + gradOutputBuffer.size(4)}); + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + gradWeight = gradWeight.view({gradWeight.size(0) * gradWeight.size(1), + gradWeight.size(2), + gradWeight.size(3), + gradWeight.size(4)}); + } + + input = input.view({batchSize, nInputPlane, inputHeight, inputWidth}); + offset = offset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + + if (batch == 0) { + gradOutput = gradOutput.view({nOutputPlane, outputHeight, outputWidth}); + input = input.view({nInputPlane, inputHeight, inputWidth}); + } + + return 1; +} + +void modulated_deform_conv_cuda_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor output, + at::Tensor columns, + int kernel_h, + int kernel_w, + const int stride_h, + const int stride_w, + const int pad_h, + const int pad_w, + const int dilation_h, + const int dilation_w, + const int group, + const int deformable_group, + 
const bool with_bias) { + TORCH_CHECK(input.is_contiguous(), "input tensor has to be contiguous"); + TORCH_CHECK(weight.is_contiguous(), "weight tensor has to be contiguous"); + + const int batch = input.size(0); + const int channels = input.size(1); + const int height = input.size(2); + const int width = input.size(3); + + const int channels_out = weight.size(0); + const int channels_kernel = weight.size(1); + const int kernel_h_ = weight.size(2); + const int kernel_w_ = weight.size(3); + + if (kernel_h_ != kernel_h || kernel_w_ != kernel_w) + AT_ERROR( + "Input shape and kernel shape wont match: (%d x %d vs %d x %d).", + kernel_h_, + kernel_w, + kernel_h_, + kernel_w_); + if (channels != channels_kernel * group) + AT_ERROR( + "Input shape and kernel channels wont match: (%d vs %d).", + channels, + channels_kernel * group); + + const int height_out = + (height + 2 * pad_h - (dilation_h * (kernel_h - 1) + 1)) / stride_h + 1; + const int width_out = + (width + 2 * pad_w - (dilation_w * (kernel_w - 1) + 1)) / stride_w + 1; + + if (ones.ndimension() != 2 || + ones.size(0) * ones.size(1) < height_out * width_out) { + // Resize plane and fill with ones... + ones = at::ones({height_out, width_out}, input.options()); + } + + // resize output + output = output.view({batch, channels_out, height_out, width_out}).zero_(); + // resize temporary columns + columns = at::zeros( + {channels * kernel_h * kernel_w, 1 * height_out * width_out}, + input.options()); + + output = output.view({output.size(0), + group, + output.size(1) / group, + output.size(2), + output.size(3)}); + + for (int b = 0; b < batch; b++) { + modulated_deformable_im2col_cuda( + input[b], + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + columns); + + // divide into group + weight = weight.view({group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + + for (int g = 0; g < group; g++) { + output[b][g] = output[b][g] + .flatten(1) + .addmm_(weight[g].flatten(1), columns[g]) + .view_as(output[b][g]); + } + + weight = weight.view({weight.size(0) * weight.size(1), + weight.size(2), + weight.size(3), + weight.size(4)}); + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + } + + output = output.view({output.size(0), + output.size(1) * output.size(2), + output.size(3), + output.size(4)}); + + if (with_bias) { + output += bias.view({1, bias.size(0), 1, 1}); + } +} + +void modulated_deform_conv_cuda_backward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor columns, + at::Tensor grad_input, + at::Tensor grad_weight, + at::Tensor grad_bias, + at::Tensor grad_offset, + at::Tensor grad_mask, + at::Tensor grad_output, + int kernel_h, + int kernel_w, + int stride_h, + int stride_w, + int pad_h, + int pad_w, + int dilation_h, + int dilation_w, + int group, + int deformable_group, + const bool with_bias) { + TORCH_CHECK(input.is_contiguous(), "input tensor has to be contiguous"); + TORCH_CHECK(weight.is_contiguous(), "weight tensor has to be contiguous"); + + const int batch = input.size(0); + const int channels = input.size(1); + const int height = input.size(2); + const int width = input.size(3); + + const int channels_kernel = weight.size(1); + const int kernel_h_ = 
weight.size(2); + const int kernel_w_ = weight.size(3); + if (kernel_h_ != kernel_h || kernel_w_ != kernel_w) + AT_ERROR( + "Input shape and kernel shape wont match: (%d x %d vs %d x %d).", + kernel_h_, + kernel_w, + kernel_h_, + kernel_w_); + if (channels != channels_kernel * group) + AT_ERROR( + "Input shape and kernel channels wont match: (%d vs %d).", + channels, + channels_kernel * group); + + const int height_out = + (height + 2 * pad_h - (dilation_h * (kernel_h - 1) + 1)) / stride_h + 1; + const int width_out = + (width + 2 * pad_w - (dilation_w * (kernel_w - 1) + 1)) / stride_w + 1; + + if (ones.ndimension() != 2 || + ones.size(0) * ones.size(1) < height_out * width_out) { + // Resize plane and fill with ones... + ones = at::ones({height_out, width_out}, input.options()); + } + + grad_input = grad_input.view({batch, channels, height, width}); + columns = at::zeros( + {channels * kernel_h * kernel_w, height_out * width_out}, + input.options()); + + grad_output = grad_output.view({grad_output.size(0), + group, + grad_output.size(1) / group, + grad_output.size(2), + grad_output.size(3)}); + + for (int b = 0; b < batch; b++) { + // divide int group + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + weight = weight.view({group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + + for (int g = 0; g < group; g++) { + columns[g].addmm_( + weight[g].flatten(1).transpose(0, 1), + grad_output[b][g].flatten(1), + 0.0f, + 1.0f); + } + + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + weight = weight.view({weight.size(0) * weight.size(1), + weight.size(2), + weight.size(3), + weight.size(4)}); + + // gradient w.r.t. input coordinate data + modulated_deformable_col2im_coord_cuda( + columns, + input[b], + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + grad_offset[b], + grad_mask[b]); + // gradient w.r.t. input data + modulated_deformable_col2im_cuda( + columns, + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + grad_input[b]); + + // gradient w.r.t. 
weight, dWeight should accumulate across the batch and + // group + modulated_deformable_im2col_cuda( + input[b], + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + columns); + + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + grad_weight = grad_weight.view({group, + grad_weight.size(0) / group, + grad_weight.size(1), + grad_weight.size(2), + grad_weight.size(3)}); + if (with_bias) + grad_bias = grad_bias.view({group, grad_bias.size(0) / group}); + + for (int g = 0; g < group; g++) { + grad_weight[g] = + grad_weight[g] + .flatten(1) + .addmm_(grad_output[b][g].flatten(1), columns[g].transpose(0, 1)) + .view_as(grad_weight[g]); + if (with_bias) { + grad_bias[g] = + grad_bias[g] + .view({-1, 1}) + .addmm_(grad_output[b][g].flatten(1), ones.view({-1, 1})) + .view(-1); + } + } + + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + grad_weight = grad_weight.view({grad_weight.size(0) * grad_weight.size(1), + grad_weight.size(2), + grad_weight.size(3), + grad_weight.size(4)}); + if (with_bias) + grad_bias = grad_bias.view({grad_bias.size(0) * grad_bias.size(1)}); + } + grad_output = grad_output.view({grad_output.size(0) * grad_output.size(1), + grad_output.size(2), + grad_output.size(3), + grad_output.size(4)}); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv_cuda_kernel.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv_cuda_kernel.cu new file mode 100644 index 0000000000000000000000000000000000000000..841f3166c902e7f1c17fe58137d42a58e4f66d69 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/deformable/deform_conv_cuda_kernel.cu @@ -0,0 +1,1288 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +// modified from +// https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/dcn/src/deform_conv_cuda_kernel.cu +// Original license: Apache 2.0 +// clang-format off + +// modify from +// https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/blob/mmdetection/mmdet/ops/dcn/src/deform_conv_cuda_kernel.cu + +/*! + ******************* BEGIN Caffe Copyright Notice and Disclaimer ***************** + * + * COPYRIGHT + * + * All contributions by the University of California: + * Copyright (c) 2014-2017 The Regents of the University of California (Regents) + * All rights reserved. + * + * All other contributions: + * Copyright (c) 2014-2017, the respective contributors + * All rights reserved. + * + * Caffe uses a shared copyright model: each contributor holds copyright over + * their contributions to Caffe. The project versioning records all such + * contribution and copyright details. If a contributor wants to further mark + * their specific copyright on a particular contribution, they should indicate + * their copyright solely in the commit message of the change when it is + * committed. + * + * LICENSE + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * 2. 
Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + *AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + *IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + *FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + *DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + *SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + *CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + *OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + *OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * CONTRIBUTION AGREEMENT + * + * By contributing to the BVLC/caffe repository through pull-request, comment, + * or otherwise, the contributor releases their content to the + * license and copyright terms herein. + * + ***************** END Caffe Copyright Notice and Disclaimer ********************* + * + * Copyright (c) 2018 Microsoft + * Licensed under The MIT License [see LICENSE for details] + * \file modulated_deformable_im2col.cuh + * \brief Function definitions of converting an image to + * column matrix based on kernel, padding, dilation, and offset. + * These functions are mainly used in deformable convolution operators. + * \ref: https://arxiv.org/abs/1703.06211 + * \author Yuwen Xiong, Haozhi Qi, Jifeng Dai, Xizhou Zhu, Han Hu, Dazhi Cheng + */ + +#include +#include +#include +#include +#include +#include + +using namespace at; + +#define CUDA_KERNEL_LOOP(i, n) \ + for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < (n); \ + i += blockDim.x * gridDim.x) + + +namespace { + +const int CUDA_NUM_THREADS = 1024; +const int kMaxGridNum = 65535; + +inline int GET_BLOCKS(const int N) { + return std::min(kMaxGridNum, (N + CUDA_NUM_THREADS - 1) / CUDA_NUM_THREADS); +} + +} + +template +__device__ scalar_t deformable_im2col_bilinear( + const scalar_t* bottom_data, + const int data_width, + const int height, + const int width, + scalar_t h, + scalar_t w) { + int h_low = floor(h); + int w_low = floor(w); + int h_high = h_low + 1; + int w_high = w_low + 1; + + scalar_t lh = h - h_low; + scalar_t lw = w - w_low; + scalar_t hh = 1 - lh, hw = 1 - lw; + + scalar_t v1 = 0; + if (h_low >= 0 && w_low >= 0) + v1 = bottom_data[h_low * data_width + w_low]; + scalar_t v2 = 0; + if (h_low >= 0 && w_high <= width - 1) + v2 = bottom_data[h_low * data_width + w_high]; + scalar_t v3 = 0; + if (h_high <= height - 1 && w_low >= 0) + v3 = bottom_data[h_high * data_width + w_low]; + scalar_t v4 = 0; + if (h_high <= height - 1 && w_high <= width - 1) + v4 = bottom_data[h_high * data_width + w_high]; + + scalar_t w1 = hh * hw, w2 = hh * lw, w3 = lh * hw, w4 = lh * lw; + + scalar_t val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + return val; +} + +template +__device__ scalar_t get_gradient_weight( + scalar_t argmax_h, + scalar_t argmax_w, + const int h, + const int w, + const int height, + const int width) { + if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 || + argmax_w >= width) { + // empty + return 0; + } + + int argmax_h_low = floor(argmax_h); + int argmax_w_low = floor(argmax_w); + int argmax_h_high = 
argmax_h_low + 1; + int argmax_w_high = argmax_w_low + 1; + + scalar_t weight = 0; + if (h == argmax_h_low && w == argmax_w_low) + weight = (h + 1 - argmax_h) * (w + 1 - argmax_w); + if (h == argmax_h_low && w == argmax_w_high) + weight = (h + 1 - argmax_h) * (argmax_w + 1 - w); + if (h == argmax_h_high && w == argmax_w_low) + weight = (argmax_h + 1 - h) * (w + 1 - argmax_w); + if (h == argmax_h_high && w == argmax_w_high) + weight = (argmax_h + 1 - h) * (argmax_w + 1 - w); + return weight; +} + +template +__device__ scalar_t get_coordinate_weight( + scalar_t argmax_h, + scalar_t argmax_w, + const int height, + const int width, + const scalar_t* im_data, + const int data_width, + const int bp_dir) { + if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 || + argmax_w >= width) { + // empty + return 0; + } + + int argmax_h_low = floor(argmax_h); + int argmax_w_low = floor(argmax_w); + int argmax_h_high = argmax_h_low + 1; + int argmax_w_high = argmax_w_low + 1; + + scalar_t weight = 0; + + if (bp_dir == 0) { + if (argmax_h_low >= 0 && argmax_w_low >= 0) + weight += -1 * (argmax_w_low + 1 - argmax_w) * + im_data[argmax_h_low * data_width + argmax_w_low]; + if (argmax_h_low >= 0 && argmax_w_high <= width - 1) + weight += -1 * (argmax_w - argmax_w_low) * + im_data[argmax_h_low * data_width + argmax_w_high]; + if (argmax_h_high <= height - 1 && argmax_w_low >= 0) + weight += (argmax_w_low + 1 - argmax_w) * + im_data[argmax_h_high * data_width + argmax_w_low]; + if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1) + weight += (argmax_w - argmax_w_low) * + im_data[argmax_h_high * data_width + argmax_w_high]; + } else if (bp_dir == 1) { + if (argmax_h_low >= 0 && argmax_w_low >= 0) + weight += -1 * (argmax_h_low + 1 - argmax_h) * + im_data[argmax_h_low * data_width + argmax_w_low]; + if (argmax_h_low >= 0 && argmax_w_high <= width - 1) + weight += (argmax_h_low + 1 - argmax_h) * + im_data[argmax_h_low * data_width + argmax_w_high]; + if (argmax_h_high <= height - 1 && argmax_w_low >= 0) + weight += -1 * (argmax_h - argmax_h_low) * + im_data[argmax_h_high * data_width + argmax_w_low]; + if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1) + weight += (argmax_h - argmax_h_low) * + im_data[argmax_h_high * data_width + argmax_w_high]; + } + + return weight; +} + +template +__global__ void deformable_im2col_gpu_kernel( + const int n, + const scalar_t* data_im, + const scalar_t* data_offset, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int num_channels, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* data_col) { + CUDA_KERNEL_LOOP(index, n) { + // index index of output matrix + const int w_col = index % width_col; + const int h_col = (index / width_col) % height_col; + const int b_col = (index / width_col / height_col) % batch_size; + const int c_im = (index / width_col / height_col) / batch_size; + const int c_col = c_im * kernel_h * kernel_w; + + // compute deformable group index + const int deformable_group_index = c_im / channel_per_deformable_group; + + const int h_in = h_col * stride_h - pad_h; + const int w_in = w_col * stride_w - pad_w; + scalar_t* data_col_ptr = data_col + + ((c_col * batch_size + b_col) * height_col + h_col) * width_col + w_col; + // const scalar_t* data_im_ptr = data_im + 
((b_col * num_channels + c_im) * + // height + h_in) * width + w_in; + const scalar_t* data_im_ptr = + data_im + (b_col * num_channels + c_im) * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b_col * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + + for (int i = 0; i < kernel_h; ++i) { + for (int j = 0; j < kernel_w; ++j) { + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_col) * width_col + w_col; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_col) * width_col + + w_col; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + scalar_t val = static_cast(0); + const scalar_t h_im = h_in + i * dilation_h + offset_h; + const scalar_t w_im = w_in + j * dilation_w + offset_w; + if (h_im > -1 && w_im > -1 && h_im < height && w_im < width) { + // const scalar_t map_h = i * dilation_h + offset_h; + // const scalar_t map_w = j * dilation_w + offset_w; + // const int cur_height = height - h_in; + // const int cur_width = width - w_in; + // val = deformable_im2col_bilinear(data_im_ptr, width, cur_height, + // cur_width, map_h, map_w); + val = deformable_im2col_bilinear( + data_im_ptr, width, height, width, h_im, w_im); + } + *data_col_ptr = val; + data_col_ptr += batch_size * height_col * width_col; + } + } + } +} + + +template +__global__ void deformable_col2im_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_offset, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_im) { + CUDA_KERNEL_LOOP(index, n) { + const int j = (index / width_col / height_col / batch_size) % kernel_w; + const int i = + (index / width_col / height_col / batch_size / kernel_w) % kernel_h; + const int c = + index / width_col / height_col / batch_size / kernel_w / kernel_h; + // compute the start and end of the output + + const int deformable_group_index = c / channel_per_deformable_group; + + int w_out = index % width_col; + int h_out = (index / width_col) % height_col; + int b = (index / width_col / height_col) % batch_size; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + w_out; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t cur_inv_h_data = h_in + i * dilation_h + offset_h; + const scalar_t cur_inv_w_data = w_in + j * dilation_w + offset_w; + + const scalar_t cur_top_grad = data_col[index]; + const int cur_h = (int)cur_inv_h_data; + const int cur_w = (int)cur_inv_w_data; + for (int dy = -2; dy <= 2; dy++) { + for (int dx = -2; dx <= 2; dx++) { + if (cur_h + dy >= 0 && cur_h + dy < height && cur_w + dx >= 0 && + cur_w + dx < width && abs(cur_inv_h_data - (cur_h + dy)) < 1 && + 
abs(cur_inv_w_data - (cur_w + dx)) < 1) { + int cur_bottom_grad_pos = + ((b * channels + c) * height + cur_h + dy) * width + cur_w + dx; + scalar_t weight = get_gradient_weight( + cur_inv_h_data, + cur_inv_w_data, + cur_h + dy, + cur_w + dx, + height, + width); + atomicAdd(grad_im + cur_bottom_grad_pos, weight * cur_top_grad); + } + } + } + } +} + + +template +__global__ void deformable_col2im_coord_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_im, + const scalar_t* data_offset, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int offset_channels, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_offset) { + CUDA_KERNEL_LOOP(index, n) { + scalar_t val = 0; + int w = index % width_col; + int h = (index / width_col) % height_col; + int c = (index / width_col / height_col) % offset_channels; + int b = (index / width_col / height_col) / offset_channels; + // compute the start and end of the output + + const int deformable_group_index = c / (2 * kernel_h * kernel_w); + const int col_step = kernel_h * kernel_w; + int cnt = 0; + const scalar_t* data_col_ptr = data_col + + deformable_group_index * channel_per_deformable_group * batch_size * + width_col * height_col; + const scalar_t* data_im_ptr = data_im + + (b * deformable_group + deformable_group_index) * + channel_per_deformable_group / kernel_h / kernel_w * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + + const int offset_c = c - deformable_group_index * 2 * kernel_h * kernel_w; + + for (int col_c = (offset_c / 2); col_c < channel_per_deformable_group; + col_c += col_step) { + const int col_pos = + (((col_c * batch_size + b) * height_col) + h) * width_col + w; + const int bp_dir = offset_c % 2; + + int j = (col_pos / width_col / height_col / batch_size) % kernel_w; + int i = + (col_pos / width_col / height_col / batch_size / kernel_w) % kernel_h; + int w_out = col_pos % width_col; + int h_out = (col_pos / width_col) % height_col; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + const int data_offset_h_ptr = + (((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out); + const int data_offset_w_ptr = + (((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + + w_out); + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + scalar_t inv_h = h_in + i * dilation_h + offset_h; + scalar_t inv_w = w_in + j * dilation_w + offset_w; + if (inv_h <= -1 || inv_w <= -1 || inv_h >= height || inv_w >= width) { + inv_h = inv_w = -2; + } + const scalar_t weight = get_coordinate_weight( + inv_h, + inv_w, + height, + width, + data_im_ptr + cnt * height * width, + width, + bp_dir); + val += weight * data_col_ptr[col_pos]; + cnt += 1; + } + + grad_offset[index] = val; + } +} + + +namespace detectron2 { + +void deformable_im2col( + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int 
dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor data_col) { + // num_axes should be smaller than block size + // todo: check parallel_imgs is correctly passed in + int height_col = + (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1; + int width_col = + (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1; + int num_kernels = channels * height_col * width_col * parallel_imgs; + int channel_per_deformable_group = channels / deformable_group; + + at::cuda::CUDAGuard device_guard(data_im.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_im.scalar_type(), "deformable_im2col_gpu", ([&] { + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + scalar_t* data_col_ = data_col.data_ptr(); + + deformable_im2col_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_im_, + data_offset_, + height, + width, + ksize_h, + ksize_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + parallel_imgs, + channels, + deformable_group, + height_col, + width_col, + data_col_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf("error in deformable_im2col: %s\n", cudaGetErrorString(err)); + } +} + + +void deformable_col2im( + const at::Tensor data_col, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor grad_im) { + // todo: make sure parallel_imgs is passed in correctly + int height_col = + (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1; + int width_col = + (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1; + int num_kernels = + channels * ksize_h * ksize_w * height_col * width_col * parallel_imgs; + int channel_per_deformable_group = channels / deformable_group; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "deformable_col2im_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + scalar_t* grad_im_ = grad_im.data_ptr(); + + deformable_col2im_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_offset_, + channels, + height, + width, + ksize_h, + ksize_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + parallel_imgs, + deformable_group, + height_col, + width_col, + grad_im_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf("error in deformable_col2im: %s\n", cudaGetErrorString(err)); + } +} + + +void deformable_col2im_coord( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + 
at::Tensor grad_offset) { + int height_col = + (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1; + int width_col = + (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1; + int num_kernels = height_col * width_col * 2 * ksize_h * ksize_w * + deformable_group * parallel_imgs; + int channel_per_deformable_group = + channels * ksize_h * ksize_w / deformable_group; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "deformable_col2im_coord_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + scalar_t* grad_offset_ = grad_offset.data_ptr(); + + deformable_col2im_coord_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_im_, + data_offset_, + channels, + height, + width, + ksize_h, + ksize_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + parallel_imgs, + 2 * ksize_h * ksize_w * deformable_group, + deformable_group, + height_col, + width_col, + grad_offset_); + })); +} + +} // namespace detectron2 + + +template +__device__ scalar_t dmcn_im2col_bilinear( + const scalar_t* bottom_data, + const int data_width, + const int height, + const int width, + scalar_t h, + scalar_t w) { + int h_low = floor(h); + int w_low = floor(w); + int h_high = h_low + 1; + int w_high = w_low + 1; + + scalar_t lh = h - h_low; + scalar_t lw = w - w_low; + scalar_t hh = 1 - lh, hw = 1 - lw; + + scalar_t v1 = 0; + if (h_low >= 0 && w_low >= 0) + v1 = bottom_data[h_low * data_width + w_low]; + scalar_t v2 = 0; + if (h_low >= 0 && w_high <= width - 1) + v2 = bottom_data[h_low * data_width + w_high]; + scalar_t v3 = 0; + if (h_high <= height - 1 && w_low >= 0) + v3 = bottom_data[h_high * data_width + w_low]; + scalar_t v4 = 0; + if (h_high <= height - 1 && w_high <= width - 1) + v4 = bottom_data[h_high * data_width + w_high]; + + scalar_t w1 = hh * hw, w2 = hh * lw, w3 = lh * hw, w4 = lh * lw; + + scalar_t val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + return val; +} + +template +__device__ scalar_t dmcn_get_gradient_weight( + scalar_t argmax_h, + scalar_t argmax_w, + const int h, + const int w, + const int height, + const int width) { + if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 || + argmax_w >= width) { + // empty + return 0; + } + + int argmax_h_low = floor(argmax_h); + int argmax_w_low = floor(argmax_w); + int argmax_h_high = argmax_h_low + 1; + int argmax_w_high = argmax_w_low + 1; + + scalar_t weight = 0; + if (h == argmax_h_low && w == argmax_w_low) + weight = (h + 1 - argmax_h) * (w + 1 - argmax_w); + if (h == argmax_h_low && w == argmax_w_high) + weight = (h + 1 - argmax_h) * (argmax_w + 1 - w); + if (h == argmax_h_high && w == argmax_w_low) + weight = (argmax_h + 1 - h) * (w + 1 - argmax_w); + if (h == argmax_h_high && w == argmax_w_high) + weight = (argmax_h + 1 - h) * (argmax_w + 1 - w); + return weight; +} + +template +__device__ scalar_t dmcn_get_coordinate_weight( + scalar_t argmax_h, + scalar_t argmax_w, + const int height, + const int width, + const scalar_t* im_data, + const int data_width, + const int bp_dir) { + if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 || + argmax_w >= width) { + // empty + return 0; + } + + int argmax_h_low = floor(argmax_h); + int argmax_w_low = 
floor(argmax_w); + int argmax_h_high = argmax_h_low + 1; + int argmax_w_high = argmax_w_low + 1; + + scalar_t weight = 0; + + if (bp_dir == 0) { + if (argmax_h_low >= 0 && argmax_w_low >= 0) + weight += -1 * (argmax_w_low + 1 - argmax_w) * + im_data[argmax_h_low * data_width + argmax_w_low]; + if (argmax_h_low >= 0 && argmax_w_high <= width - 1) + weight += -1 * (argmax_w - argmax_w_low) * + im_data[argmax_h_low * data_width + argmax_w_high]; + if (argmax_h_high <= height - 1 && argmax_w_low >= 0) + weight += (argmax_w_low + 1 - argmax_w) * + im_data[argmax_h_high * data_width + argmax_w_low]; + if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1) + weight += (argmax_w - argmax_w_low) * + im_data[argmax_h_high * data_width + argmax_w_high]; + } else if (bp_dir == 1) { + if (argmax_h_low >= 0 && argmax_w_low >= 0) + weight += -1 * (argmax_h_low + 1 - argmax_h) * + im_data[argmax_h_low * data_width + argmax_w_low]; + if (argmax_h_low >= 0 && argmax_w_high <= width - 1) + weight += (argmax_h_low + 1 - argmax_h) * + im_data[argmax_h_low * data_width + argmax_w_high]; + if (argmax_h_high <= height - 1 && argmax_w_low >= 0) + weight += -1 * (argmax_h - argmax_h_low) * + im_data[argmax_h_high * data_width + argmax_w_low]; + if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1) + weight += (argmax_h - argmax_h_low) * + im_data[argmax_h_high * data_width + argmax_w_high]; + } + + return weight; +} + +template +__global__ void modulated_deformable_im2col_gpu_kernel( + const int n, + const scalar_t* data_im, + const scalar_t* data_offset, + const scalar_t* data_mask, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int num_channels, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* data_col) { + CUDA_KERNEL_LOOP(index, n) { + // index index of output matrix + const int w_col = index % width_col; + const int h_col = (index / width_col) % height_col; + const int b_col = (index / width_col / height_col) % batch_size; + const int c_im = (index / width_col / height_col) / batch_size; + const int c_col = c_im * kernel_h * kernel_w; + + // compute deformable group index + const int deformable_group_index = c_im / channel_per_deformable_group; + + const int h_in = h_col * stride_h - pad_h; + const int w_in = w_col * stride_w - pad_w; + + scalar_t* data_col_ptr = data_col + + ((c_col * batch_size + b_col) * height_col + h_col) * width_col + w_col; + // const float* data_im_ptr = data_im + ((b_col * num_channels + c_im) * + // height + h_in) * width + w_in; + const scalar_t* data_im_ptr = + data_im + (b_col * num_channels + c_im) * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b_col * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + + const scalar_t* data_mask_ptr = data_mask + + (b_col * deformable_group + deformable_group_index) * kernel_h * + kernel_w * height_col * width_col; + + for (int i = 0; i < kernel_h; ++i) { + for (int j = 0; j < kernel_w; ++j) { + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_col) * width_col + w_col; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_col) * width_col + + w_col; + const int data_mask_hw_ptr = + ((i * kernel_w + j) * height_col + h_col) * 
width_col + w_col; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t mask = data_mask_ptr[data_mask_hw_ptr]; + scalar_t val = static_cast(0); + const scalar_t h_im = h_in + i * dilation_h + offset_h; + const scalar_t w_im = w_in + j * dilation_w + offset_w; + // if (h_im >= 0 && w_im >= 0 && h_im < height && w_im < width) { + if (h_im > -1 && w_im > -1 && h_im < height && w_im < width) { + // const float map_h = i * dilation_h + offset_h; + // const float map_w = j * dilation_w + offset_w; + // const int cur_height = height - h_in; + // const int cur_width = width - w_in; + // val = dmcn_im2col_bilinear(data_im_ptr, width, cur_height, + // cur_width, map_h, map_w); + val = dmcn_im2col_bilinear( + data_im_ptr, width, height, width, h_im, w_im); + } + *data_col_ptr = val * mask; + data_col_ptr += batch_size * height_col * width_col; + // data_col_ptr += height_col * width_col; + } + } + } +} + +template +__global__ void modulated_deformable_col2im_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_offset, + const scalar_t* data_mask, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_im) { + CUDA_KERNEL_LOOP(index, n) { + const int j = (index / width_col / height_col / batch_size) % kernel_w; + const int i = + (index / width_col / height_col / batch_size / kernel_w) % kernel_h; + const int c = + index / width_col / height_col / batch_size / kernel_w / kernel_h; + // compute the start and end of the output + + const int deformable_group_index = c / channel_per_deformable_group; + + int w_out = index % width_col; + int h_out = (index / width_col) % height_col; + int b = (index / width_col / height_col) % batch_size; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + const scalar_t* data_mask_ptr = data_mask + + (b * deformable_group + deformable_group_index) * kernel_h * kernel_w * + height_col * width_col; + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + w_out; + const int data_mask_hw_ptr = + ((i * kernel_w + j) * height_col + h_out) * width_col + w_out; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t mask = data_mask_ptr[data_mask_hw_ptr]; + const scalar_t cur_inv_h_data = h_in + i * dilation_h + offset_h; + const scalar_t cur_inv_w_data = w_in + j * dilation_w + offset_w; + + const scalar_t cur_top_grad = data_col[index] * mask; + const int cur_h = (int)cur_inv_h_data; + const int cur_w = (int)cur_inv_w_data; + for (int dy = -2; dy <= 2; dy++) { + for (int dx = -2; dx <= 2; dx++) { + if (cur_h + dy >= 0 && cur_h + dy < height && cur_w + dx >= 0 && + cur_w + dx < width && abs(cur_inv_h_data - (cur_h + dy)) < 1 && + abs(cur_inv_w_data - (cur_w + dx)) < 1) { + int cur_bottom_grad_pos = + ((b * channels + c) 
* height + cur_h + dy) * width + cur_w + dx; + scalar_t weight = dmcn_get_gradient_weight( + cur_inv_h_data, + cur_inv_w_data, + cur_h + dy, + cur_w + dx, + height, + width); + atomicAdd(grad_im + cur_bottom_grad_pos, weight * cur_top_grad); + } + } + } + } +} + +template +__global__ void modulated_deformable_col2im_coord_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_im, + const scalar_t* data_offset, + const scalar_t* data_mask, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int offset_channels, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_offset, + scalar_t* grad_mask) { + CUDA_KERNEL_LOOP(index, n) { + scalar_t val = 0, mval = 0; + int w = index % width_col; + int h = (index / width_col) % height_col; + int c = (index / width_col / height_col) % offset_channels; + int b = (index / width_col / height_col) / offset_channels; + // compute the start and end of the output + + const int deformable_group_index = c / (2 * kernel_h * kernel_w); + const int col_step = kernel_h * kernel_w; + int cnt = 0; + const scalar_t* data_col_ptr = data_col + + deformable_group_index * channel_per_deformable_group * batch_size * + width_col * height_col; + const scalar_t* data_im_ptr = data_im + + (b * deformable_group + deformable_group_index) * + channel_per_deformable_group / kernel_h / kernel_w * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + const scalar_t* data_mask_ptr = data_mask + + (b * deformable_group + deformable_group_index) * kernel_h * kernel_w * + height_col * width_col; + + const int offset_c = c - deformable_group_index * 2 * kernel_h * kernel_w; + + for (int col_c = (offset_c / 2); col_c < channel_per_deformable_group; + col_c += col_step) { + const int col_pos = + (((col_c * batch_size + b) * height_col) + h) * width_col + w; + const int bp_dir = offset_c % 2; + + int j = (col_pos / width_col / height_col / batch_size) % kernel_w; + int i = + (col_pos / width_col / height_col / batch_size / kernel_w) % kernel_h; + int w_out = col_pos % width_col; + int h_out = (col_pos / width_col) % height_col; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + const int data_offset_h_ptr = + (((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out); + const int data_offset_w_ptr = + (((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + + w_out); + const int data_mask_hw_ptr = + (((i * kernel_w + j) * height_col + h_out) * width_col + w_out); + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t mask = data_mask_ptr[data_mask_hw_ptr]; + scalar_t inv_h = h_in + i * dilation_h + offset_h; + scalar_t inv_w = w_in + j * dilation_w + offset_w; + if (inv_h <= -1 || inv_w <= -1 || inv_h >= height || inv_w >= width) { + inv_h = inv_w = -2; + } else { + mval += data_col_ptr[col_pos] * + dmcn_im2col_bilinear( + data_im_ptr + cnt * height * width, + width, + height, + width, + inv_h, + inv_w); + } + const scalar_t weight = dmcn_get_coordinate_weight( + inv_h, + inv_w, + height, + width, + data_im_ptr + cnt * 
height * width, + width, + bp_dir); + val += weight * data_col_ptr[col_pos] * mask; + cnt += 1; + } + // KERNEL_ASSIGN(grad_offset[index], offset_req, val); + grad_offset[index] = val; + if (offset_c % 2 == 0) + // KERNEL_ASSIGN(grad_mask[(((b * deformable_group + + // deformable_group_index) * kernel_h * kernel_w + offset_c / 2) * + // height_col + h) * width_col + w], mask_req, mval); + grad_mask + [(((b * deformable_group + deformable_group_index) * kernel_h * + kernel_w + + offset_c / 2) * + height_col + + h) * + width_col + + w] = mval; + } +} + + +namespace detectron2 { + +void modulated_deformable_im2col_cuda( + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor data_col) { + // num_axes should be smaller than block size + const int channel_per_deformable_group = channels / deformable_group; + const int num_kernels = channels * batch_size * height_col * width_col; + + at::cuda::CUDAGuard device_guard(data_im.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_im.scalar_type(), "modulated_deformable_im2col_gpu", ([&] { + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + const scalar_t* data_mask_ = data_mask.data_ptr(); + scalar_t* data_col_ = data_col.data_ptr(); + + modulated_deformable_im2col_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_im_, + data_offset_, + data_mask_, + height_im, + width_im, + kernel_h, + kenerl_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + batch_size, + channels, + deformable_group, + height_col, + width_col, + data_col_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf( + "error in modulated_deformable_im2col_cuda: %s\n", + cudaGetErrorString(err)); + } +} + +void modulated_deformable_col2im_cuda( + const at::Tensor data_col, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_im) { + const int channel_per_deformable_group = channels / deformable_group; + const int num_kernels = + channels * kernel_h * kernel_w * batch_size * height_col * width_col; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "modulated_deformable_col2im_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + const scalar_t* data_mask_ = data_mask.data_ptr(); + scalar_t* grad_im_ = grad_im.data_ptr(); + + modulated_deformable_col2im_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_offset_, + data_mask_, + 
channels, + height_im, + width_im, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + batch_size, + deformable_group, + height_col, + width_col, + grad_im_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf( + "error in modulated_deformable_col2im_cuda: %s\n", + cudaGetErrorString(err)); + } +} + +void modulated_deformable_col2im_coord_cuda( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_offset, + at::Tensor grad_mask) { + const int num_kernels = batch_size * height_col * width_col * 2 * kernel_h * + kernel_w * deformable_group; + const int channel_per_deformable_group = + channels * kernel_h * kernel_w / deformable_group; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "modulated_deformable_col2im_coord_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + const scalar_t* data_mask_ = data_mask.data_ptr(); + scalar_t* grad_offset_ = grad_offset.data_ptr(); + scalar_t* grad_mask_ = grad_mask.data_ptr(); + + modulated_deformable_col2im_coord_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_im_, + data_offset_, + data_mask_, + channels, + height_im, + width_im, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + batch_size, + 2 * kernel_h * kernel_w * deformable_group, + deformable_group, + height_col, + width_col, + grad_offset_, + grad_mask_); + })); + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf( + "error in modulated_deformable_col2im_coord_cuda: %s\n", + cudaGetErrorString(err)); + } +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated.h b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated.h new file mode 100644 index 0000000000000000000000000000000000000000..9c86c8d55cd24fb5322657b9d2f676fc3e1373ba --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated.h @@ -0,0 +1,39 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +#pragma once +#include + +namespace detectron2 { + +at::Tensor nms_rotated_cpu( + const at::Tensor& dets, + const at::Tensor& scores, + const float iou_threshold); + +#ifdef WITH_CUDA +at::Tensor nms_rotated_cuda( + const at::Tensor& dets, + const at::Tensor& scores, + const float iou_threshold); +#endif + +// Interface for Python +// inline is needed to prevent multiple function definitions when this header is +// included by different cpps +inline at::Tensor nms_rotated( + const at::Tensor& dets, + const at::Tensor& scores, + const float iou_threshold) { + assert(dets.device().is_cuda() == scores.device().is_cuda()); + if (dets.device().is_cuda()) { +#ifdef WITH_CUDA + return nms_rotated_cuda( + dets.contiguous(), scores.contiguous(), iou_threshold); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + + return nms_rotated_cpu(dets.contiguous(), scores.contiguous(), iou_threshold); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated_cpu.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated_cpu.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0658e388df005748c358dcbf3a1ad2a59da6cac8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated_cpu.cpp @@ -0,0 +1,75 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include "../box_iou_rotated/box_iou_rotated_utils.h" +#include "nms_rotated.h" + +namespace detectron2 { + +template +at::Tensor nms_rotated_cpu_kernel( + const at::Tensor& dets, + const at::Tensor& scores, + const float iou_threshold) { + // nms_rotated_cpu_kernel is modified from torchvision's nms_cpu_kernel, + // however, the code in this function is much shorter because + // we delegate the IoU computation for rotated boxes to + // the single_box_iou_rotated function in box_iou_rotated_utils.h + AT_ASSERTM(dets.device().is_cpu(), "dets must be a CPU tensor"); + AT_ASSERTM(scores.device().is_cpu(), "scores must be a CPU tensor"); + AT_ASSERTM( + dets.scalar_type() == scores.scalar_type(), + "dets should have the same type as scores"); + + if (dets.numel() == 0) { + return at::empty({0}, dets.options().dtype(at::kLong)); + } + + auto order_t = std::get<1>(scores.sort(0, /* descending=*/true)); + + auto ndets = dets.size(0); + at::Tensor suppressed_t = at::zeros({ndets}, dets.options().dtype(at::kByte)); + at::Tensor keep_t = at::zeros({ndets}, dets.options().dtype(at::kLong)); + + auto suppressed = suppressed_t.data_ptr(); + auto keep = keep_t.data_ptr(); + auto order = order_t.data_ptr(); + + int64_t num_to_keep = 0; + + for (int64_t _i = 0; _i < ndets; _i++) { + auto i = order[_i]; + if (suppressed[i] == 1) { + continue; + } + + keep[num_to_keep++] = i; + + for (int64_t _j = _i + 1; _j < ndets; _j++) { + auto j = order[_j]; + if (suppressed[j] == 1) { + continue; + } + + auto ovr = single_box_iou_rotated( + dets[i].data_ptr(), dets[j].data_ptr()); + if (ovr >= iou_threshold) { + suppressed[j] = 1; + } + } + } + return keep_t.narrow(/*dim=*/0, /*start=*/0, /*length=*/num_to_keep); +} + +at::Tensor nms_rotated_cpu( + // input must be contiguous + const at::Tensor& dets, + const at::Tensor& scores, + const float iou_threshold) { + auto result = at::empty({0}, dets.options()); + + AT_DISPATCH_FLOATING_TYPES(dets.scalar_type(), "nms_rotated", [&] { + result = 
nms_rotated_cpu_kernel(dets, scores, iou_threshold); + }); + return result; +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated_cuda.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated_cuda.cu new file mode 100644 index 0000000000000000000000000000000000000000..40977a0da1761fe807205fbcf8029d56bf75786c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/nms_rotated/nms_rotated_cuda.cu @@ -0,0 +1,139 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include +#include +#include +#include +#include "../box_iou_rotated/box_iou_rotated_utils.h" + +using namespace detectron2; + +namespace { +int const threadsPerBlock = sizeof(unsigned long long) * 8; +} + +template +__global__ void nms_rotated_cuda_kernel( + const int n_boxes, + const float iou_threshold, + const T* dev_boxes, + unsigned long long* dev_mask) { + // nms_rotated_cuda_kernel is modified from torchvision's nms_cuda_kernel + + const int row_start = blockIdx.y; + const int col_start = blockIdx.x; + + // if (row_start > col_start) return; + + const int row_size = + min(n_boxes - row_start * threadsPerBlock, threadsPerBlock); + const int col_size = + min(n_boxes - col_start * threadsPerBlock, threadsPerBlock); + + // Compared to nms_cuda_kernel, where each box is represented with 4 values + // (x1, y1, x2, y2), each rotated box is represented with 5 values + // (x_center, y_center, width, height, angle_degrees) here. + __shared__ T block_boxes[threadsPerBlock * 5]; + if (threadIdx.x < col_size) { + block_boxes[threadIdx.x * 5 + 0] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 0]; + block_boxes[threadIdx.x * 5 + 1] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 1]; + block_boxes[threadIdx.x * 5 + 2] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 2]; + block_boxes[threadIdx.x * 5 + 3] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 3]; + block_boxes[threadIdx.x * 5 + 4] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 4]; + } + __syncthreads(); + + if (threadIdx.x < row_size) { + const int cur_box_idx = threadsPerBlock * row_start + threadIdx.x; + const T* cur_box = dev_boxes + cur_box_idx * 5; + int i = 0; + unsigned long long t = 0; + int start = 0; + if (row_start == col_start) { + start = threadIdx.x + 1; + } + for (i = start; i < col_size; i++) { + // Instead of devIoU used by original horizontal nms, here + // we use the single_box_iou_rotated function from box_iou_rotated_utils.h + if (single_box_iou_rotated(cur_box, block_boxes + i * 5) > + iou_threshold) { + t |= 1ULL << i; + } + } + const int col_blocks = at::cuda::ATenCeilDiv(n_boxes, threadsPerBlock); + dev_mask[cur_box_idx * col_blocks + col_start] = t; + } +} + +namespace detectron2 { + +at::Tensor nms_rotated_cuda( + // input must be contiguous + const at::Tensor& dets, + const at::Tensor& scores, + float iou_threshold) { + // using scalar_t = float; + AT_ASSERTM(dets.is_cuda(), "dets must be a CUDA tensor"); + AT_ASSERTM(scores.is_cuda(), "scores must be a CUDA tensor"); + at::cuda::CUDAGuard device_guard(dets.device()); + + auto order_t = std::get<1>(scores.sort(0, /* descending=*/true)); + auto dets_sorted = dets.index_select(0, order_t); + + auto dets_num = dets.size(0); + + const int col_blocks = + 
at::cuda::ATenCeilDiv(static_cast(dets_num), threadsPerBlock); + + at::Tensor mask = + at::empty({dets_num * col_blocks}, dets.options().dtype(at::kLong)); + + dim3 blocks(col_blocks, col_blocks); + dim3 threads(threadsPerBlock); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES( + dets_sorted.scalar_type(), "nms_rotated_kernel_cuda", [&] { + nms_rotated_cuda_kernel<<>>( + dets_num, + iou_threshold, + dets_sorted.data_ptr(), + (unsigned long long*)mask.data_ptr()); + }); + + at::Tensor mask_cpu = mask.to(at::kCPU); + unsigned long long* mask_host = + (unsigned long long*)mask_cpu.data_ptr(); + + std::vector remv(col_blocks); + memset(&remv[0], 0, sizeof(unsigned long long) * col_blocks); + + at::Tensor keep = + at::empty({dets_num}, dets.options().dtype(at::kLong).device(at::kCPU)); + int64_t* keep_out = keep.data_ptr(); + + int num_to_keep = 0; + for (int i = 0; i < dets_num; i++) { + int nblock = i / threadsPerBlock; + int inblock = i % threadsPerBlock; + + if (!(remv[nblock] & (1ULL << inblock))) { + keep_out[num_to_keep++] = i; + unsigned long long* p = mask_host + i * col_blocks; + for (int j = nblock; j < col_blocks; j++) { + remv[j] |= p[j]; + } + } + } + + AT_CUDA_CHECK(cudaGetLastError()); + return order_t.index( + {keep.narrow(/*dim=*/0, /*start=*/0, /*length=*/num_to_keep) + .to(order_t.device(), keep.scalar_type())}); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/vision.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/vision.cpp new file mode 100644 index 0000000000000000000000000000000000000000..fa7942e881af704d33a79e8b2ecd1ac5b6f3a7ef --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/csrc/vision.cpp @@ -0,0 +1,102 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +#include +#include "ROIAlign/ROIAlign.h" +#include "ROIAlignRotated/ROIAlignRotated.h" +#include "box_iou_rotated/box_iou_rotated.h" +#include "deformable/deform_conv.h" +#include "nms_rotated/nms_rotated.h" + +namespace detectron2 { + +#ifdef WITH_CUDA +extern int get_cudart_version(); +#endif + +std::string get_cuda_version() { +#ifdef WITH_CUDA + std::ostringstream oss; + + // copied from + // https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/cuda/detail/CUDAHooks.cpp#L231 + auto printCudaStyleVersion = [&](int v) { + oss << (v / 1000) << "." << (v / 10 % 100); + if (v % 10 != 0) { + oss << "." << (v % 10); + } + }; + printCudaStyleVersion(get_cudart_version()); + return oss.str(); +#else + return std::string("not available"); +#endif +} + +// similar to +// https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/Version.cpp +std::string get_compiler_version() { + std::ostringstream ss; +#if defined(__GNUC__) +#ifndef __clang__ + +#if ((__GNUC__ <= 4) && (__GNUC_MINOR__ <= 8)) +#error "GCC >= 4.9 is required!" +#endif + + { ss << "GCC " << __GNUC__ << "." << __GNUC_MINOR__; } +#endif +#endif + +#if defined(__clang_major__) + { + ss << "clang " << __clang_major__ << "." << __clang_minor__ << "." 
+ << __clang_patchlevel__; + } +#endif + +#if defined(_MSC_VER) + { ss << "MSVC " << _MSC_FULL_VER; } +#endif + return ss.str(); +} + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("get_compiler_version", &get_compiler_version, "get_compiler_version"); + m.def("get_cuda_version", &get_cuda_version, "get_cuda_version"); + + m.def("box_iou_rotated", &box_iou_rotated, "IoU for rotated boxes"); + + m.def("deform_conv_forward", &deform_conv_forward, "deform_conv_forward"); + m.def( + "deform_conv_backward_input", + &deform_conv_backward_input, + "deform_conv_backward_input"); + m.def( + "deform_conv_backward_filter", + &deform_conv_backward_filter, + "deform_conv_backward_filter"); + m.def( + "modulated_deform_conv_forward", + &modulated_deform_conv_forward, + "modulated_deform_conv_forward"); + m.def( + "modulated_deform_conv_backward", + &modulated_deform_conv_backward, + "modulated_deform_conv_backward"); + + m.def("nms_rotated", &nms_rotated, "NMS for rotated boxes"); + + m.def("roi_align_forward", &ROIAlign_forward, "ROIAlign_forward"); + m.def("roi_align_backward", &ROIAlign_backward, "ROIAlign_backward"); + + m.def( + "roi_align_rotated_forward", + &ROIAlignRotated_forward, + "Forward pass for Rotated ROI-Align Operator"); + m.def( + "roi_align_rotated_backward", + &ROIAlignRotated_backward, + "Backward pass for Rotated ROI-Align Operator"); +} + +} // namespace detectron2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/deform_conv.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/deform_conv.py new file mode 100644 index 0000000000000000000000000000000000000000..ba8c6498ffdfffa281e1f02037d40cbbb6e66164 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/deform_conv.py @@ -0,0 +1,494 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import math +from functools import lru_cache +import torch +from torch import nn +from torch.autograd import Function +from torch.autograd.function import once_differentiable +from torch.nn.modules.utils import _pair + +from detectron2 import _C + +from .wrappers import _NewEmptyTensorOp + + +class _DeformConv(Function): + @staticmethod + def forward( + ctx, + input, + offset, + weight, + stride=1, + padding=0, + dilation=1, + groups=1, + deformable_groups=1, + im2col_step=64, + ): + if input is not None and input.dim() != 4: + raise ValueError( + "Expected 4D tensor as input, got {}D tensor instead.".format(input.dim()) + ) + ctx.stride = _pair(stride) + ctx.padding = _pair(padding) + ctx.dilation = _pair(dilation) + ctx.groups = groups + ctx.deformable_groups = deformable_groups + ctx.im2col_step = im2col_step + + ctx.save_for_backward(input, offset, weight) + + output = input.new_empty( + _DeformConv._output_size(input, weight, ctx.padding, ctx.dilation, ctx.stride) + ) + + ctx.bufs_ = [input.new_empty(0), input.new_empty(0)] # columns, ones + + if not input.is_cuda: + raise NotImplementedError + else: + cur_im2col_step = _DeformConv._cal_im2col_step(input.shape[0], ctx.im2col_step) + assert (input.shape[0] % cur_im2col_step) == 0, "im2col step must divide batchsize" + + _C.deform_conv_forward( + input, + weight, + offset, + output, + ctx.bufs_[0], + ctx.bufs_[1], + weight.size(3), + weight.size(2), + ctx.stride[1], + ctx.stride[0], + ctx.padding[1], + ctx.padding[0], + ctx.dilation[1], + ctx.dilation[0], + ctx.groups, + ctx.deformable_groups, + cur_im2col_step, + ) + return output + + @staticmethod + @once_differentiable + def backward(ctx, grad_output): + input, offset, weight = ctx.saved_tensors + + grad_input = grad_offset = grad_weight = None + + if not grad_output.is_cuda: + raise NotImplementedError + else: + cur_im2col_step = _DeformConv._cal_im2col_step(input.shape[0], ctx.im2col_step) + assert (input.shape[0] % cur_im2col_step) == 0, "im2col step must divide batchsize" + + if ctx.needs_input_grad[0] or ctx.needs_input_grad[1]: + grad_input = torch.zeros_like(input) + grad_offset = torch.zeros_like(offset) + _C.deform_conv_backward_input( + input, + offset, + grad_output, + grad_input, + grad_offset, + weight, + ctx.bufs_[0], + weight.size(3), + weight.size(2), + ctx.stride[1], + ctx.stride[0], + ctx.padding[1], + ctx.padding[0], + ctx.dilation[1], + ctx.dilation[0], + ctx.groups, + ctx.deformable_groups, + cur_im2col_step, + ) + + if ctx.needs_input_grad[2]: + grad_weight = torch.zeros_like(weight) + _C.deform_conv_backward_filter( + input, + offset, + grad_output, + grad_weight, + ctx.bufs_[0], + ctx.bufs_[1], + weight.size(3), + weight.size(2), + ctx.stride[1], + ctx.stride[0], + ctx.padding[1], + ctx.padding[0], + ctx.dilation[1], + ctx.dilation[0], + ctx.groups, + ctx.deformable_groups, + 1, + cur_im2col_step, + ) + + return grad_input, grad_offset, grad_weight, None, None, None, None, None, None + + @staticmethod + def _output_size(input, weight, padding, dilation, stride): + channels = weight.size(0) + output_size = (input.size(0), channels) + for d in range(input.dim() - 2): + in_size = input.size(d + 2) + pad = padding[d] + kernel = dilation[d] * (weight.size(d + 2) - 1) + 1 + stride_ = stride[d] + output_size += ((in_size + (2 * pad) - kernel) // stride_ + 1,) + if not all(map(lambda s: s > 0, output_size)): + raise ValueError( + "convolution input is too small (output would be {})".format( + "x".join(map(str, output_size)) + ) + ) + return 
output_size + + @staticmethod + @lru_cache(maxsize=128) + def _cal_im2col_step(input_size, default_size): + """ + Calculate proper im2col step size, which should be divisible by input_size and not larger + than prefer_size. Meanwhile the step size should be as large as possible to be more + efficient. So we choose the largest one among all divisors of input_size which are smaller + than prefer_size. + :param input_size: input batch size . + :param default_size: default preferred im2col step size. + :return: the largest proper step size. + """ + if input_size <= default_size: + return input_size + best_step = 1 + for step in range(2, min(int(math.sqrt(input_size)) + 1, default_size)): + if input_size % step == 0: + if input_size // step <= default_size: + return input_size // step + best_step = step + + return best_step + + +class _ModulatedDeformConv(Function): + @staticmethod + def forward( + ctx, + input, + offset, + mask, + weight, + bias=None, + stride=1, + padding=0, + dilation=1, + groups=1, + deformable_groups=1, + ): + ctx.stride = stride + ctx.padding = padding + ctx.dilation = dilation + ctx.groups = groups + ctx.deformable_groups = deformable_groups + ctx.with_bias = bias is not None + if not ctx.with_bias: + bias = input.new_empty(1) # fake tensor + if not input.is_cuda: + raise NotImplementedError + if ( + weight.requires_grad + or mask.requires_grad + or offset.requires_grad + or input.requires_grad + ): + ctx.save_for_backward(input, offset, mask, weight, bias) + output = input.new_empty(_ModulatedDeformConv._infer_shape(ctx, input, weight)) + ctx._bufs = [input.new_empty(0), input.new_empty(0)] + _C.modulated_deform_conv_forward( + input, + weight, + bias, + ctx._bufs[0], + offset, + mask, + output, + ctx._bufs[1], + weight.shape[2], + weight.shape[3], + ctx.stride, + ctx.stride, + ctx.padding, + ctx.padding, + ctx.dilation, + ctx.dilation, + ctx.groups, + ctx.deformable_groups, + ctx.with_bias, + ) + return output + + @staticmethod + @once_differentiable + def backward(ctx, grad_output): + if not grad_output.is_cuda: + raise NotImplementedError + input, offset, mask, weight, bias = ctx.saved_tensors + grad_input = torch.zeros_like(input) + grad_offset = torch.zeros_like(offset) + grad_mask = torch.zeros_like(mask) + grad_weight = torch.zeros_like(weight) + grad_bias = torch.zeros_like(bias) + _C.modulated_deform_conv_backward( + input, + weight, + bias, + ctx._bufs[0], + offset, + mask, + ctx._bufs[1], + grad_input, + grad_weight, + grad_bias, + grad_offset, + grad_mask, + grad_output, + weight.shape[2], + weight.shape[3], + ctx.stride, + ctx.stride, + ctx.padding, + ctx.padding, + ctx.dilation, + ctx.dilation, + ctx.groups, + ctx.deformable_groups, + ctx.with_bias, + ) + if not ctx.with_bias: + grad_bias = None + + return ( + grad_input, + grad_offset, + grad_mask, + grad_weight, + grad_bias, + None, + None, + None, + None, + None, + ) + + @staticmethod + def _infer_shape(ctx, input, weight): + n = input.size(0) + channels_out = weight.size(0) + height, width = input.shape[2:4] + kernel_h, kernel_w = weight.shape[2:4] + height_out = ( + height + 2 * ctx.padding - (ctx.dilation * (kernel_h - 1) + 1) + ) // ctx.stride + 1 + width_out = ( + width + 2 * ctx.padding - (ctx.dilation * (kernel_w - 1) + 1) + ) // ctx.stride + 1 + return n, channels_out, height_out, width_out + + +deform_conv = _DeformConv.apply +modulated_deform_conv = _ModulatedDeformConv.apply + + +class DeformConv(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + 
padding=0, + dilation=1, + groups=1, + deformable_groups=1, + bias=False, + norm=None, + activation=None, + ): + """ + Deformable convolution from :paper:`deformconv`. + + Arguments are similar to :class:`Conv2D`. Extra arguments: + + Args: + deformable_groups (int): number of groups used in deformable convolution. + norm (nn.Module, optional): a normalization layer + activation (callable(Tensor) -> Tensor): a callable activation function + """ + super(DeformConv, self).__init__() + + assert not bias + assert in_channels % groups == 0, "in_channels {} cannot be divisible by groups {}".format( + in_channels, groups + ) + assert ( + out_channels % groups == 0 + ), "out_channels {} cannot be divisible by groups {}".format(out_channels, groups) + + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = _pair(kernel_size) + self.stride = _pair(stride) + self.padding = _pair(padding) + self.dilation = _pair(dilation) + self.groups = groups + self.deformable_groups = deformable_groups + self.norm = norm + self.activation = activation + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels // self.groups, *self.kernel_size) + ) + self.bias = None + + nn.init.kaiming_uniform_(self.weight, nonlinearity="relu") + + def forward(self, x, offset): + if x.numel() == 0: + # When input is empty, we want to return a empty tensor with "correct" shape, + # So that the following operations will not panic + # if they check for the shape of the tensor. + # This computes the height and width of the output tensor + output_shape = [ + (i + 2 * p - (di * (k - 1) + 1)) // s + 1 + for i, p, di, k, s in zip( + x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride + ) + ] + output_shape = [x.shape[0], self.weight.shape[0]] + output_shape + return _NewEmptyTensorOp.apply(x, output_shape) + + x = deform_conv( + x, + offset, + self.weight, + self.stride, + self.padding, + self.dilation, + self.groups, + self.deformable_groups, + ) + if self.norm is not None: + x = self.norm(x) + if self.activation is not None: + x = self.activation(x) + return x + + def extra_repr(self): + tmpstr = "in_channels=" + str(self.in_channels) + tmpstr += ", out_channels=" + str(self.out_channels) + tmpstr += ", kernel_size=" + str(self.kernel_size) + tmpstr += ", stride=" + str(self.stride) + tmpstr += ", padding=" + str(self.padding) + tmpstr += ", dilation=" + str(self.dilation) + tmpstr += ", groups=" + str(self.groups) + tmpstr += ", deformable_groups=" + str(self.deformable_groups) + tmpstr += ", bias=False" + return tmpstr + + +class ModulatedDeformConv(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + deformable_groups=1, + bias=True, + norm=None, + activation=None, + ): + """ + Modulated deformable convolution from :paper:`deformconv2`. + + Arguments are similar to :class:`Conv2D`. Extra arguments: + + Args: + deformable_groups (int): number of groups used in deformable convolution. 
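An illustrative way to drive the deformable convolution defined above (a sketch, not part of the diff): a plain Conv2d predicts the per-sample offsets and DeformConv consumes them. The block name and the detectron2.layers import path are assumptions, and the underlying op only runs on CUDA inputs.

import torch
from torch import nn
from detectron2.layers import DeformConv  # assumed export path for the class above

class DeformConvBlock(nn.Module):
    def __init__(self, in_ch, out_ch, k=3, deformable_groups=1):
        super().__init__()
        # offsets: (dy, dx) for every kernel sample -> 2 * dg * k * k channels
        self.offset = nn.Conv2d(in_ch, 2 * deformable_groups * k * k, k, padding=k // 2)
        self.conv = DeformConv(in_ch, out_ch, k, padding=k // 2,
                               deformable_groups=deformable_groups)

    def forward(self, x):                 # x must be a CUDA tensor
        return self.conv(x, self.offset(x))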
+ norm (nn.Module, optional): a normalization layer + activation (callable(Tensor) -> Tensor): a callable activation function + """ + super(ModulatedDeformConv, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = _pair(kernel_size) + self.stride = stride + self.padding = padding + self.dilation = dilation + self.groups = groups + self.deformable_groups = deformable_groups + self.with_bias = bias + self.norm = norm + self.activation = activation + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels // groups, *self.kernel_size) + ) + if bias: + self.bias = nn.Parameter(torch.Tensor(out_channels)) + else: + self.bias = None + + nn.init.kaiming_uniform_(self.weight, nonlinearity="relu") + if self.bias is not None: + nn.init.constant_(self.bias, 0) + + def forward(self, x, offset, mask): + if x.numel() == 0: + output_shape = [ + (i + 2 * p - (di * (k - 1) + 1)) // s + 1 + for i, p, di, k, s in zip( + x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride + ) + ] + output_shape = [x.shape[0], self.weight.shape[0]] + output_shape + return _NewEmptyTensorOp.apply(x, output_shape) + + x = modulated_deform_conv( + x, + offset, + mask, + self.weight, + self.bias, + self.stride, + self.padding, + self.dilation, + self.groups, + self.deformable_groups, + ) + if self.norm is not None: + x = self.norm(x) + if self.activation is not None: + x = self.activation(x) + return x + + def extra_repr(self): + tmpstr = "in_channels=" + str(self.in_channels) + tmpstr += ", out_channels=" + str(self.out_channels) + tmpstr += ", kernel_size=" + str(self.kernel_size) + tmpstr += ", stride=" + str(self.stride) + tmpstr += ", padding=" + str(self.padding) + tmpstr += ", dilation=" + str(self.dilation) + tmpstr += ", groups=" + str(self.groups) + tmpstr += ", deformable_groups=" + str(self.deformable_groups) + tmpstr += ", bias=" + str(self.with_bias) + return tmpstr diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/mask_ops.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/mask_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..0fe115dbbe15c354575c67d7d10f055eab0bdf91 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/mask_ops.py @@ -0,0 +1,248 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +import torch +from PIL import Image +from torch.nn import functional as F + +__all__ = ["paste_masks_in_image"] + + +BYTES_PER_FLOAT = 4 +# TODO: This memory limit may be too much or too little. It would be better to +# determine it based on available resources. +GPU_MEM_LIMIT = 1024 ** 3 # 1 GB memory limit + + +def _do_paste_mask(masks, boxes, img_h, img_w, skip_empty=True): + """ + Args: + masks: N, 1, H, W + boxes: N, 4 + img_h, img_w (int): + skip_empty (bool): only paste masks within the region that + tightly bound all boxes, and returns the results this region only. + An important optimization for CPU. + + Returns: + if skip_empty == False, a mask of shape (N, img_h, img_w) + if skip_empty == True, a mask of shape (N, h', w'), and the slice + object for the corresponding region. + """ + # On GPU, paste all masks together (up to chunk size) + # by using the entire image to sample the masks + # Compared to pasting them one by one, + # this has more operations but is faster on COCO-scale dataset. 
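The comment above is the core of the pasting trick: build one sampling grid over the target region and let grid_sample stretch every low-resolution mask into it. A minimal single-box sketch of that idea (illustrative only; names and shapes are assumptions):

import torch
import torch.nn.functional as F

def paste_one_mask(mask, box, img_h, img_w):
    # mask: (Hm, Wm) soft mask in [0, 1]; box: (x0, y0, x1, y1) in image pixels.
    x0, y0, x1, y1 = box.tolist()
    img_y = torch.arange(img_h, dtype=torch.float32) + 0.5
    img_x = torch.arange(img_w, dtype=torch.float32) + 0.5
    # pixel centers mapped into the box's [-1, 1] normalized coordinate frame
    gy = ((img_y - y0) / (y1 - y0) * 2 - 1)[:, None].expand(img_h, img_w)
    gx = ((img_x - x0) / (x1 - x0) * 2 - 1)[None, :].expand(img_h, img_w)
    grid = torch.stack([gx, gy], dim=-1)[None]           # (1, H, W, 2), (x, y) order
    out = F.grid_sample(mask[None, None], grid, align_corners=False)
    return out[0, 0]                                      # (img_h, img_w); zeros outside the box

pasted = paste_one_mask(torch.rand(28, 28), torch.tensor([40.0, 60.0, 200.0, 260.0]), 480, 640)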
+ device = masks.device + if skip_empty: + x0_int, y0_int = torch.clamp(boxes.min(dim=0).values.floor()[:2] - 1, min=0).to( + dtype=torch.int32 + ) + x1_int = torch.clamp(boxes[:, 2].max().ceil() + 1, max=img_w).to(dtype=torch.int32) + y1_int = torch.clamp(boxes[:, 3].max().ceil() + 1, max=img_h).to(dtype=torch.int32) + else: + x0_int, y0_int = 0, 0 + x1_int, y1_int = img_w, img_h + x0, y0, x1, y1 = torch.split(boxes, 1, dim=1) # each is Nx1 + + N = masks.shape[0] + + img_y = torch.arange(y0_int, y1_int, device=device, dtype=torch.float32) + 0.5 + img_x = torch.arange(x0_int, x1_int, device=device, dtype=torch.float32) + 0.5 + img_y = (img_y - y0) / (y1 - y0) * 2 - 1 + img_x = (img_x - x0) / (x1 - x0) * 2 - 1 + # img_x, img_y have shapes (N, w), (N, h) + + gx = img_x[:, None, :].expand(N, img_y.size(1), img_x.size(1)) + gy = img_y[:, :, None].expand(N, img_y.size(1), img_x.size(1)) + grid = torch.stack([gx, gy], dim=3) + + img_masks = F.grid_sample(masks.to(dtype=torch.float32), grid, align_corners=False) + + if skip_empty: + return img_masks[:, 0], (slice(y0_int, y1_int), slice(x0_int, x1_int)) + else: + return img_masks[:, 0], () + + +def paste_masks_in_image(masks, boxes, image_shape, threshold=0.5): + """ + Paste a set of masks that are of a fixed resolution (e.g., 28 x 28) into an image. + The location, height, and width for pasting each mask is determined by their + corresponding bounding boxes in boxes. + + Note: + This is a complicated but more accurate implementation. In actual deployment, it is + often enough to use a faster but less accurate implementation. + See :func:`paste_mask_in_image_old` in this file for an alternative implementation. + + Args: + masks (tensor): Tensor of shape (Bimg, Hmask, Wmask), where Bimg is the number of + detected object instances in the image and Hmask, Wmask are the mask width and mask + height of the predicted mask (e.g., Hmask = Wmask = 28). Values are in [0, 1]. + boxes (Boxes or Tensor): A Boxes of length Bimg or Tensor of shape (Bimg, 4). + boxes[i] and masks[i] correspond to the same object instance. + image_shape (tuple): height, width + threshold (float): A threshold in [0, 1] for converting the (soft) masks to + binary masks. + + Returns: + img_masks (Tensor): A tensor of shape (Bimg, Himage, Wimage), where Bimg is the + number of detected object instances and Himage, Wimage are the image width + and height. img_masks[i] is a binary mask for object instance i. + """ + + assert masks.shape[-1] == masks.shape[-2], "Only square mask predictions are supported" + N = len(masks) + if N == 0: + return masks.new_empty((0,) + image_shape, dtype=torch.uint8) + if not isinstance(boxes, torch.Tensor): + boxes = boxes.tensor + device = boxes.device + assert len(boxes) == N, boxes.shape + + img_h, img_w = image_shape + + # The actual implementation split the input into chunks, + # and paste them chunk by chunk. + if device.type == "cpu": + # CPU is most efficient when they are pasted one by one with skip_empty=True + # so that it performs minimal number of operations. 
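For orientation, a hedged usage sketch of this function (the import path is an assumption): given Bimg low-resolution soft masks and their boxes, it returns full-image binary masks.

import torch
from detectron2.layers.mask_ops import paste_masks_in_image  # assumed import path

masks = torch.rand(3, 28, 28)                    # 3 instances, 28x28 soft masks in [0, 1]
boxes = torch.tensor([[ 10.0,  20.0, 200.0, 300.0],
                      [ 50.0,  60.0, 150.0, 400.0],
                      [300.0, 100.0, 600.0, 450.0]])
img_masks = paste_masks_in_image(masks, boxes, (480, 640), threshold=0.5)
print(img_masks.shape, img_masks.dtype)          # torch.Size([3, 480, 640]) torch.bool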
+ num_chunks = N + else: + # GPU benefits from parallelism for larger chunks, but may have memory issue + # int(img_h) because shape may be tensors in tracing + num_chunks = int(np.ceil(N * int(img_h) * int(img_w) * BYTES_PER_FLOAT / GPU_MEM_LIMIT)) + assert ( + num_chunks <= N + ), "Default GPU_MEM_LIMIT in mask_ops.py is too small; try increasing it" + chunks = torch.chunk(torch.arange(N, device=device), num_chunks) + + img_masks = torch.zeros( + N, img_h, img_w, device=device, dtype=torch.bool if threshold >= 0 else torch.uint8 + ) + for inds in chunks: + masks_chunk, spatial_inds = _do_paste_mask( + masks[inds, None, :, :], boxes[inds], img_h, img_w, skip_empty=device.type == "cpu" + ) + + if threshold >= 0: + masks_chunk = (masks_chunk >= threshold).to(dtype=torch.bool) + else: + # for visualization and debugging + masks_chunk = (masks_chunk * 255).to(dtype=torch.uint8) + + img_masks[(inds,) + spatial_inds] = masks_chunk + return img_masks + + +# The below are the original paste function (from Detectron1) which has +# larger quantization error. +# It is faster on CPU, while the aligned one is faster on GPU thanks to grid_sample. + + +def paste_mask_in_image_old(mask, box, img_h, img_w, threshold): + """ + Paste a single mask in an image. + This is a per-box implementation of :func:`paste_masks_in_image`. + This function has larger quantization error due to incorrect pixel + modeling and is not used any more. + + Args: + mask (Tensor): A tensor of shape (Hmask, Wmask) storing the mask of a single + object instance. Values are in [0, 1]. + box (Tensor): A tensor of shape (4, ) storing the x0, y0, x1, y1 box corners + of the object instance. + img_h, img_w (int): Image height and width. + threshold (float): Mask binarization threshold in [0, 1]. + + Returns: + im_mask (Tensor): + The resized and binarized object mask pasted into the original + image plane (a tensor of shape (img_h, img_w)). + """ + # Conversion from continuous box coordinates to discrete pixel coordinates + # via truncation (cast to int32). This determines which pixels to paste the + # mask onto. + box = box.to(dtype=torch.int32) # Continuous to discrete coordinate conversion + # An example (1D) box with continuous coordinates (x0=0.7, x1=4.3) will map to + # a discrete coordinates (x0=0, x1=4). Note that box is mapped to 5 = x1 - x0 + 1 + # pixels (not x1 - x0 pixels). + samples_w = box[2] - box[0] + 1 # Number of pixel samples, *not* geometric width + samples_h = box[3] - box[1] + 1 # Number of pixel samples, *not* geometric height + + # Resample the mask from it's original grid to the new samples_w x samples_h grid + mask = Image.fromarray(mask.cpu().numpy()) + mask = mask.resize((samples_w, samples_h), resample=Image.BILINEAR) + mask = np.array(mask, copy=False) + + if threshold >= 0: + mask = np.array(mask > threshold, dtype=np.uint8) + mask = torch.from_numpy(mask) + else: + # for visualization and debugging, we also + # allow it to return an unmodified mask + mask = torch.from_numpy(mask * 255).to(torch.uint8) + + im_mask = torch.zeros((img_h, img_w), dtype=torch.uint8) + x_0 = max(box[0], 0) + x_1 = min(box[2] + 1, img_w) + y_0 = max(box[1], 0) + y_1 = min(box[3] + 1, img_h) + + im_mask[y_0:y_1, x_0:x_1] = mask[ + (y_0 - box[1]) : (y_1 - box[1]), (x_0 - box[0]) : (x_1 - box[0]) + ] + return im_mask + + +# Our pixel modeling requires extrapolation for any continuous +# coordinate < 0.5 or > length - 0.5. 
When sampling pixels on the masks, +# we would like this extrapolation to be an interpolation between boundary values and zero, +# instead of using absolute zero or boundary values. +# Therefore `paste_mask_in_image_old` is often used with zero padding around the masks like this: +# masks, scale = pad_masks(masks[:, 0, :, :], 1) +# boxes = scale_boxes(boxes.tensor, scale) + + +def pad_masks(masks, padding): + """ + Args: + masks (tensor): A tensor of shape (B, M, M) representing B masks. + padding (int): Number of cells to pad on all sides. + + Returns: + The padded masks and the scale factor of the padding size / original size. + """ + B = masks.shape[0] + M = masks.shape[-1] + pad2 = 2 * padding + scale = float(M + pad2) / M + padded_masks = masks.new_zeros((B, M + pad2, M + pad2)) + padded_masks[:, padding:-padding, padding:-padding] = masks + return padded_masks, scale + + +def scale_boxes(boxes, scale): + """ + Args: + boxes (tensor): A tensor of shape (B, 4) representing B boxes with 4 + coords representing the corners x0, y0, x1, y1, + scale (float): The box scaling factor. + + Returns: + Scaled boxes. + """ + w_half = (boxes[:, 2] - boxes[:, 0]) * 0.5 + h_half = (boxes[:, 3] - boxes[:, 1]) * 0.5 + x_c = (boxes[:, 2] + boxes[:, 0]) * 0.5 + y_c = (boxes[:, 3] + boxes[:, 1]) * 0.5 + + w_half *= scale + h_half *= scale + + scaled_boxes = torch.zeros_like(boxes) + scaled_boxes[:, 0] = x_c - w_half + scaled_boxes[:, 2] = x_c + w_half + scaled_boxes[:, 1] = y_c - h_half + scaled_boxes[:, 3] = y_c + h_half + return scaled_boxes diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/nms.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/nms.py new file mode 100644 index 0000000000000000000000000000000000000000..aafe29b3aa551caeeda769dd17b8834b08c7f11c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/nms.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import torch +from torchvision.ops import boxes as box_ops +from torchvision.ops import nms # BC-compat + + +def batched_nms(boxes, scores, idxs, iou_threshold): + """ + Same as torchvision.ops.boxes.batched_nms, but safer. + """ + assert boxes.shape[-1] == 4 + # TODO may need better strategy. + # Investigate after having a fully-cuda NMS op. + if len(boxes) < 40000: + return box_ops.batched_nms(boxes, scores, idxs, iou_threshold) + + result_mask = scores.new_zeros(scores.size(), dtype=torch.bool) + for id in torch.unique(idxs).cpu().tolist(): + mask = (idxs == id).nonzero().view(-1) + keep = nms(boxes[mask], scores[mask], iou_threshold) + result_mask[mask[keep]] = True + keep = result_mask.nonzero().view(-1) + keep = keep[scores[keep].argsort(descending=True)] + return keep + + +# Note: this function (nms_rotated) might be moved into +# torchvision/ops/boxes.py in the future +def nms_rotated(boxes, scores, iou_threshold): + """ + Performs non-maximum suppression (NMS) on the rotated boxes according + to their intersection-over-union (IoU). + + Rotated NMS iteratively removes lower scoring rotated boxes which have an + IoU greater than iou_threshold with another (higher scoring) rotated box. + + Note that RotatedBox (5, 3, 4, 2, -90) covers exactly the same region as + RotatedBox (5, 3, 4, 2, 90) does, and their IoU will be 1. However, they + can be representing completely different objects in certain tasks, e.g., OCR. 
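A small illustration of the point just made (a sketch, not part of the diff; the detectron2.layers import path is an assumption): the two same-region boxes have IoU 1, so score-based rotated NMS keeps only the higher-scoring one.

import torch
from detectron2.layers import nms_rotated  # assumed export path for the function defined here

boxes = torch.tensor([[ 5.0,  3.0, 4.0, 2.0, -90.0],   # same region as the next box
                      [ 5.0,  3.0, 4.0, 2.0,  90.0],
                      [20.0, 20.0, 6.0, 3.0,  30.0]])   # a far-away box
scores = torch.tensor([0.9, 0.8, 0.7])
keep = nms_rotated(boxes, scores, 0.5)
# expected: tensor([0, 2]) -- box 1 is suppressed by box 0 (IoU 1), box 2 survives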
+ + As for the question of whether rotated-NMS should treat them as faraway boxes + even though their IOU is 1, it depends on the application and/or ground truth annotation. + + As an extreme example, consider a single character v and the square box around it. + + If the angle is 0 degree, the object (text) would be read as 'v'; + + If the angle is 90 degrees, the object (text) would become '>'; + + If the angle is 180 degrees, the object (text) would become '^'; + + If the angle is 270/-90 degrees, the object (text) would become '<' + + All of these cases have IoU of 1 to each other, and rotated NMS that only + uses IoU as criterion would only keep one of them with the highest score - + which, practically, still makes sense in most cases because typically + only one of theses orientations is the correct one. Also, it does not matter + as much if the box is only used to classify the object (instead of transcribing + them with a sequential OCR recognition model) later. + + On the other hand, when we use IoU to filter proposals that are close to the + ground truth during training, we should definitely take the angle into account if + we know the ground truth is labeled with the strictly correct orientation (as in, + upside-down words are annotated with -180 degrees even though they can be covered + with a 0/90/-90 degree box, etc.) + + The way the original dataset is annotated also matters. For example, if the dataset + is a 4-point polygon dataset that does not enforce ordering of vertices/orientation, + we can estimate a minimum rotated bounding box to this polygon, but there's no way + we can tell the correct angle with 100% confidence (as shown above, there could be 4 different + rotated boxes, with angles differed by 90 degrees to each other, covering the exactly + same region). In that case we have to just use IoU to determine the box + proximity (as many detection benchmarks (even for text) do) unless there're other + assumptions we can make (like width is always larger than height, or the object is not + rotated by more than 90 degrees CCW/CW, etc.) + + In summary, not considering angles in rotated NMS seems to be a good option for now, + but we should be aware of its implications. + + Args: + boxes (Tensor[N, 5]): Rotated boxes to perform NMS on. They are expected to be in + (x_center, y_center, width, height, angle_degrees) format. + scores (Tensor[N]): Scores for each one of the rotated boxes + iou_threshold (float): Discards all overlapping rotated boxes with IoU < iou_threshold + + Returns: + keep (Tensor): int64 tensor with the indices of the elements that have been kept + by Rotated NMS, sorted in decreasing order of scores + """ + from detectron2 import _C + + return _C.nms_rotated(boxes, scores, iou_threshold) + + +# Note: this function (batched_nms_rotated) might be moved into +# torchvision/ops/boxes.py in the future +def batched_nms_rotated(boxes, scores, idxs, iou_threshold): + """ + Performs non-maximum suppression in a batched fashion. + + Each index value correspond to a category, and NMS + will not be applied between elements of different categories. + + Args: + boxes (Tensor[N, 5]): + boxes where NMS will be performed. They + are expected to be in (x_ctr, y_ctr, width, height, angle_degrees) format + scores (Tensor[N]): + scores for each one of the boxes + idxs (Tensor[N]): + indices of the categories for each one of the boxes. 
+ iou_threshold (float): + discards all overlapping boxes + with IoU < iou_threshold + + Returns: + Tensor: + int64 tensor with the indices of the elements that have been kept + by NMS, sorted in decreasing order of scores + """ + assert boxes.shape[-1] == 5 + + if boxes.numel() == 0: + return torch.empty((0,), dtype=torch.int64, device=boxes.device) + # Strategy: in order to perform NMS independently per class, + # we add an offset to all the boxes. The offset is dependent + # only on the class idx, and is large enough so that boxes + # from different classes do not overlap + + # Note that batched_nms in torchvision/ops/boxes.py only uses max_coordinate, + # which won't handle negative coordinates correctly. + # Here by using min_coordinate we can make sure the negative coordinates are + # correctly handled. + max_coordinate = ( + torch.max(boxes[:, 0], boxes[:, 1]) + torch.max(boxes[:, 2], boxes[:, 3]) / 2 + ).max() + min_coordinate = ( + torch.min(boxes[:, 0], boxes[:, 1]) - torch.max(boxes[:, 2], boxes[:, 3]) / 2 + ).min() + offsets = idxs.to(boxes) * (max_coordinate - min_coordinate + 1) + boxes_for_nms = boxes.clone() # avoid modifying the original values in boxes + boxes_for_nms[:, :2] += offsets[:, None] + keep = nms_rotated(boxes_for_nms, scores, iou_threshold) + return keep diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/roi_align.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/roi_align.py new file mode 100644 index 0000000000000000000000000000000000000000..f8c4ce1d747ec77329fab34436f5efa0e958ef32 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/roi_align.py @@ -0,0 +1,105 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from torch import nn +from torch.autograd import Function +from torch.autograd.function import once_differentiable +from torch.nn.modules.utils import _pair + +from detectron2 import _C + + +class _ROIAlign(Function): + @staticmethod + def forward(ctx, input, roi, output_size, spatial_scale, sampling_ratio, aligned): + ctx.save_for_backward(roi) + ctx.output_size = _pair(output_size) + ctx.spatial_scale = spatial_scale + ctx.sampling_ratio = sampling_ratio + ctx.input_shape = input.size() + ctx.aligned = aligned + output = _C.roi_align_forward( + input, roi, spatial_scale, output_size[0], output_size[1], sampling_ratio, aligned + ) + return output + + @staticmethod + @once_differentiable + def backward(ctx, grad_output): + (rois,) = ctx.saved_tensors + output_size = ctx.output_size + spatial_scale = ctx.spatial_scale + sampling_ratio = ctx.sampling_ratio + bs, ch, h, w = ctx.input_shape + grad_input = _C.roi_align_backward( + grad_output, + rois, + spatial_scale, + output_size[0], + output_size[1], + bs, + ch, + h, + w, + sampling_ratio, + ctx.aligned, + ) + return grad_input, None, None, None, None, None + + +roi_align = _ROIAlign.apply + + +class ROIAlign(nn.Module): + def __init__(self, output_size, spatial_scale, sampling_ratio, aligned=True): + """ + Args: + output_size (tuple): h, w + spatial_scale (float): scale the input boxes by this number + sampling_ratio (int): number of inputs samples to take for each output + sample. 0 to take samples densely. + aligned (bool): if False, use the legacy implementation in + Detectron. If True, align the results more perfectly. 
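A usage sketch for this layer (illustrative; the import path is an assumption, and it requires the compiled _C extension). The note that follows explains what aligned=True changes about the sampling coordinates.

import torch
from detectron2.layers import ROIAlign  # assumed export path

# rois are (batch_index, x0, y0, x1, y1) in input-image coordinates;
# spatial_scale maps them onto a stride-16 feature map.
pooler = ROIAlign(output_size=(7, 7), spatial_scale=1.0 / 16, sampling_ratio=0, aligned=True)
features = torch.randn(2, 256, 50, 50)
rois = torch.tensor([[0.0, 32.0, 32.0, 256.0, 200.0],
                     [1.0, 10.0, 40.0, 120.0, 160.0]])
print(pooler(features, rois).shape)     # torch.Size([2, 256, 7, 7])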
+ + Note: + The meaning of aligned=True: + + Given a continuous coordinate c, its two neighboring pixel indices (in our + pixel model) are computed by floor(c - 0.5) and ceil(c - 0.5). For example, + c=1.3 has pixel neighbors with discrete indices [0] and [1] (which are sampled + from the underlying signal at continuous coordinates 0.5 and 1.5). But the original + roi_align (aligned=False) does not subtract the 0.5 when computing neighboring + pixel indices and therefore it uses pixels with a slightly incorrect alignment + (relative to our pixel model) when performing bilinear interpolation. + + With `aligned=True`, + we first appropriately scale the ROI and then shift it by -0.5 + prior to calling roi_align. This produces the correct neighbors; see + detectron2/tests/test_roi_align.py for verification. + + The difference does not make a difference to the model's performance if + ROIAlign is used together with conv layers. + """ + super(ROIAlign, self).__init__() + self.output_size = output_size + self.spatial_scale = spatial_scale + self.sampling_ratio = sampling_ratio + self.aligned = aligned + + def forward(self, input, rois): + """ + Args: + input: NCHW images + rois: Bx5 boxes. First column is the index into N. The other 4 columns are xyxy. + """ + assert rois.dim() == 2 and rois.size(1) == 5 + return roi_align( + input, rois, self.output_size, self.spatial_scale, self.sampling_ratio, self.aligned + ) + + def __repr__(self): + tmpstr = self.__class__.__name__ + "(" + tmpstr += "output_size=" + str(self.output_size) + tmpstr += ", spatial_scale=" + str(self.spatial_scale) + tmpstr += ", sampling_ratio=" + str(self.sampling_ratio) + tmpstr += ", aligned=" + str(self.aligned) + tmpstr += ")" + return tmpstr diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/roi_align_rotated.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/roi_align_rotated.py new file mode 100644 index 0000000000000000000000000000000000000000..6ed87e69d5e738f8dbaa7c73c5c8de65343de0fd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/roi_align_rotated.py @@ -0,0 +1,88 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from torch import nn +from torch.autograd import Function +from torch.autograd.function import once_differentiable +from torch.nn.modules.utils import _pair + +from detectron2 import _C + + +class _ROIAlignRotated(Function): + @staticmethod + def forward(ctx, input, roi, output_size, spatial_scale, sampling_ratio): + ctx.save_for_backward(roi) + ctx.output_size = _pair(output_size) + ctx.spatial_scale = spatial_scale + ctx.sampling_ratio = sampling_ratio + ctx.input_shape = input.size() + output = _C.roi_align_rotated_forward( + input, roi, spatial_scale, output_size[0], output_size[1], sampling_ratio + ) + return output + + @staticmethod + @once_differentiable + def backward(ctx, grad_output): + (rois,) = ctx.saved_tensors + output_size = ctx.output_size + spatial_scale = ctx.spatial_scale + sampling_ratio = ctx.sampling_ratio + bs, ch, h, w = ctx.input_shape + grad_input = _C.roi_align_rotated_backward( + grad_output, + rois, + spatial_scale, + output_size[0], + output_size[1], + bs, + ch, + h, + w, + sampling_ratio, + ) + return grad_input, None, None, None, None, None + + +roi_align_rotated = _ROIAlignRotated.apply + + +class ROIAlignRotated(nn.Module): + def __init__(self, output_size, spatial_scale, sampling_ratio): + """ + Args: + output_size (tuple): h, w + spatial_scale (float): scale the input boxes by this number + sampling_ratio (int): number of inputs samples to take for each output + sample. 0 to take samples densely. + + Note: + ROIAlignRotated supports continuous coordinate by default: + Given a continuous coordinate c, its two neighboring pixel indices (in our + pixel model) are computed by floor(c - 0.5) and ceil(c - 0.5). For example, + c=1.3 has pixel neighbors with discrete indices [0] and [1] (which are sampled + from the underlying signal at continuous coordinates 0.5 and 1.5). + """ + super(ROIAlignRotated, self).__init__() + self.output_size = output_size + self.spatial_scale = spatial_scale + self.sampling_ratio = sampling_ratio + + def forward(self, input, rois): + """ + Args: + input: NCHW images + rois: Bx6 boxes. First column is the index into N. + The other 5 columns are (x_ctr, y_ctr, width, height, angle_degrees). + """ + assert rois.dim() == 2 and rois.size(1) == 6 + return roi_align_rotated( + input, rois, self.output_size, self.spatial_scale, self.sampling_ratio + ) + + def __repr__(self): + tmpstr = self.__class__.__name__ + "(" + tmpstr += "output_size=" + str(self.output_size) + tmpstr += ", spatial_scale=" + str(self.spatial_scale) + tmpstr += ", sampling_ratio=" + str(self.sampling_ratio) + tmpstr += ")" + return tmpstr diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/rotated_boxes.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/rotated_boxes.py new file mode 100644 index 0000000000000000000000000000000000000000..ea9b08583da79aae871b500bcffc19f8a352da6e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/rotated_boxes.py @@ -0,0 +1,22 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from __future__ import absolute_import, division, print_function, unicode_literals + +from detectron2 import _C + + +def pairwise_iou_rotated(boxes1, boxes2): + """ + Return intersection-over-union (Jaccard index) of boxes. + + Both sets of boxes are expected to be in + (x_center, y_center, width, height, angle) format. 
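For concreteness, a small illustration of this format and function (a sketch; the import path is an assumption): an 8x4 box compared with itself and with a copy rotated 90 degrees about the same center.

import torch
from detectron2.layers import pairwise_iou_rotated  # assumed export path

boxes1 = torch.tensor([[10.0, 10.0, 8.0, 4.0,  0.0],
                       [10.0, 10.0, 8.0, 4.0, 90.0]])
boxes2 = torch.tensor([[10.0, 10.0, 8.0, 4.0,  0.0]])
iou = pairwise_iou_rotated(boxes1, boxes2)     # shape (2, 1)
# iou[0, 0] = 1.0 (identical box); iou[1, 0] = 16 / 48 ~ 0.33 (4x4 overlap of the two 8x4 boxes)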
+ + Arguments: + boxes1 (Tensor[N, 5]) + boxes2 (Tensor[M, 5]) + + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + return _C.box_iou_rotated(boxes1, boxes2) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/shape_spec.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/shape_spec.py new file mode 100644 index 0000000000000000000000000000000000000000..ed7f0d08268a2342cfb8246cc032686f2343ef8f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/shape_spec.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from collections import namedtuple + + +class ShapeSpec(namedtuple("_ShapeSpec", ["channels", "height", "width", "stride"])): + """ + A simple structure that contains basic shape specification about a tensor. + It is often used as the auxiliary inputs/outputs of models, + to obtain the shape inference ability among pytorch modules. + + Attributes: + channels: + height: + width: + stride: + """ + + def __new__(cls, *, channels=None, height=None, width=None, stride=None): + return super().__new__(cls, channels, height, width, stride) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/wrappers.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..7e3935e90c61f02e000568af79ed458dd491fed7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/layers/wrappers.py @@ -0,0 +1,215 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Wrappers around on some nn functions, mainly to support empty tensors. + +Ideally, add support directly in PyTorch to empty tensors in those functions. + +These can be removed once https://github.com/pytorch/pytorch/issues/12013 +is implemented +""" + +import math +import torch +from torch.nn.modules.utils import _ntuple + +TORCH_VERSION = tuple(int(x) for x in torch.__version__.split(".")[:2]) + + +def cat(tensors, dim=0): + """ + Efficient version of torch.cat that avoids a copy if there is only a single element in a list + """ + assert isinstance(tensors, (list, tuple)) + if len(tensors) == 1: + return tensors[0] + return torch.cat(tensors, dim) + + +class _NewEmptyTensorOp(torch.autograd.Function): + @staticmethod + def forward(ctx, x, new_shape): + ctx.shape = x.shape + return x.new_empty(new_shape) + + @staticmethod + def backward(ctx, grad): + shape = ctx.shape + return _NewEmptyTensorOp.apply(grad, shape), None + + +class Conv2d(torch.nn.Conv2d): + """ + A wrapper around :class:`torch.nn.Conv2d` to support empty inputs and more features. + """ + + def __init__(self, *args, **kwargs): + """ + Extra keyword arguments supported in addition to those in `torch.nn.Conv2d`: + + Args: + norm (nn.Module, optional): a normalization layer + activation (callable(Tensor) -> Tensor): a callable activation function + + It assumes that norm layer is used before activation. 
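A short usage sketch of this wrapper (illustrative; the import path is an assumption): the norm module is applied to the convolution output first, then the activation.

import torch
from torch import nn
import torch.nn.functional as F
from detectron2.layers import Conv2d  # assumed export path for this wrapper

conv = Conv2d(16, 32, kernel_size=3, padding=1,
              norm=nn.GroupNorm(8, 32),   # runs first on the conv output
              activation=F.relu)          # runs after the norm
y = conv(torch.randn(2, 16, 24, 24))      # (2, 32, 24, 24)
# Zero-size batches (e.g. an image with no instances) are the case this wrapper exists for.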
+ """ + norm = kwargs.pop("norm", None) + activation = kwargs.pop("activation", None) + super().__init__(*args, **kwargs) + + self.norm = norm + self.activation = activation + + def forward(self, x): + if x.numel() == 0 and self.training: + # https://github.com/pytorch/pytorch/issues/12013 + assert not isinstance( + self.norm, torch.nn.SyncBatchNorm + ), "SyncBatchNorm does not support empty inputs!" + + if x.numel() == 0 and TORCH_VERSION <= (1, 4): + assert not isinstance( + self.norm, torch.nn.GroupNorm + ), "GroupNorm does not support empty inputs in PyTorch <=1.4!" + # When input is empty, we want to return a empty tensor with "correct" shape, + # So that the following operations will not panic + # if they check for the shape of the tensor. + # This computes the height and width of the output tensor + output_shape = [ + (i + 2 * p - (di * (k - 1) + 1)) // s + 1 + for i, p, di, k, s in zip( + x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride + ) + ] + output_shape = [x.shape[0], self.weight.shape[0]] + output_shape + empty = _NewEmptyTensorOp.apply(x, output_shape) + if self.training: + # This is to make DDP happy. + # DDP expects all workers to have gradient w.r.t the same set of parameters. + _dummy = sum(x.view(-1)[0] for x in self.parameters()) * 0.0 + return empty + _dummy + else: + return empty + + x = super().forward(x) + if self.norm is not None: + x = self.norm(x) + if self.activation is not None: + x = self.activation(x) + return x + + +if TORCH_VERSION > (1, 4): + ConvTranspose2d = torch.nn.ConvTranspose2d +else: + + class ConvTranspose2d(torch.nn.ConvTranspose2d): + """ + A wrapper around :class:`torch.nn.ConvTranspose2d` to support zero-size tensor. + """ + + def forward(self, x): + if x.numel() > 0: + return super(ConvTranspose2d, self).forward(x) + # get output shape + + # When input is empty, we want to return a empty tensor with "correct" shape, + # So that the following operations will not panic + # if they check for the shape of the tensor. + # This computes the height and width of the output tensor + output_shape = [ + (i - 1) * d - 2 * p + (di * (k - 1) + 1) + op + for i, p, di, k, d, op in zip( + x.shape[-2:], + self.padding, + self.dilation, + self.kernel_size, + self.stride, + self.output_padding, + ) + ] + output_shape = [x.shape[0], self.out_channels] + output_shape + # This is to make DDP happy. + # DDP expects all workers to have gradient w.r.t the same set of parameters. + _dummy = sum(x.view(-1)[0] for x in self.parameters()) * 0.0 + return _NewEmptyTensorOp.apply(x, output_shape) + _dummy + + +if TORCH_VERSION > (1, 4): + BatchNorm2d = torch.nn.BatchNorm2d +else: + + class BatchNorm2d(torch.nn.BatchNorm2d): + """ + A wrapper around :class:`torch.nn.BatchNorm2d` to support zero-size tensor. + """ + + def forward(self, x): + if x.numel() > 0: + return super(BatchNorm2d, self).forward(x) + # get output shape + output_shape = x.shape + return _NewEmptyTensorOp.apply(x, output_shape) + + +if TORCH_VERSION > (1, 5): + Linear = torch.nn.Linear +else: + + class Linear(torch.nn.Linear): + """ + A wrapper around :class:`torch.nn.Linear` to support empty inputs and more features. + Because of https://github.com/pytorch/pytorch/issues/34202 + """ + + def forward(self, x): + if x.numel() == 0: + output_shape = [x.shape[0], self.weight.shape[0]] + + empty = _NewEmptyTensorOp.apply(x, output_shape) + if self.training: + # This is to make DDP happy. + # DDP expects all workers to have gradient w.r.t the same set of parameters. 
+ _dummy = sum(x.view(-1)[0] for x in self.parameters()) * 0.0 + return empty + _dummy + else: + return empty + + x = super().forward(x) + return x + + +def interpolate(input, size=None, scale_factor=None, mode="nearest", align_corners=None): + """ + A wrapper around :func:`torch.nn.functional.interpolate` to support zero-size tensor. + """ + if TORCH_VERSION > (1, 4) or input.numel() > 0: + return torch.nn.functional.interpolate( + input, size, scale_factor, mode, align_corners=align_corners + ) + + def _check_size_scale_factor(dim): + if size is None and scale_factor is None: + raise ValueError("either size or scale_factor should be defined") + if size is not None and scale_factor is not None: + raise ValueError("only one of size or scale_factor should be defined") + if ( + scale_factor is not None + and isinstance(scale_factor, tuple) + and len(scale_factor) != dim + ): + raise ValueError( + "scale_factor shape must match input shape. " + "Input is {}D, scale_factor size is {}".format(dim, len(scale_factor)) + ) + + def _output_size(dim): + _check_size_scale_factor(dim) + if size is not None: + return size + scale_factors = _ntuple(dim)(scale_factor) + # math.floor might return float in py2.7 + return [int(math.floor(input.size(i + 2) * scale_factors[i])) for i in range(dim)] + + output_shape = tuple(_output_size(2)) + output_shape = input.shape[:-2] + output_shape + return _NewEmptyTensorOp.apply(input, output_shape) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/model_zoo/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/model_zoo/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..886616f8e11ef31ea85d7a7ba9a75308befceedf --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/model_zoo/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Model Zoo API for Detectron2: a collection of functions to create common model architectures and +optionally load pre-trained weights as released in +`MODEL_ZOO.md `_. +""" +from .model_zoo import get, get_config_file, get_checkpoint_url + +__all__ = ["get_checkpoint_url", "get", "get_config_file"] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/model_zoo/model_zoo.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/model_zoo/model_zoo.py new file mode 100644 index 0000000000000000000000000000000000000000..68d0ce5dc442864474bb1086bf04d6e40708c190 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/model_zoo/model_zoo.py @@ -0,0 +1,150 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import os +import pkg_resources +import torch + +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.modeling import build_model + + +class _ModelZooUrls(object): + """ + Mapping from names to officially released Detectron2 pre-trained models. 
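Concretely, the mapping works as follows (an illustrative aside; the table and get_checkpoint_url below implement it): the .yaml suffix is dropped from the config path, and the model-id/file suffix from the table is appended to the S3 prefix.

config_path = "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml"
suffix = "137260431/model_final_a54504.pkl"      # value from the table below
url = ("https://dl.fbaipublicfiles.com/detectron2/"
       + config_path.replace(".yaml", "") + "/" + suffix)
# https://dl.fbaipublicfiles.com/detectron2/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x/137260431/model_final_a54504.pkl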
+ """ + + S3_PREFIX = "https://dl.fbaipublicfiles.com/detectron2/" + + # format: {config_path.yaml} -> model_id/model_final_{commit}.pkl + CONFIG_PATH_TO_URL_SUFFIX = { + # COCO Detection with Faster R-CNN + "COCO-Detection/faster_rcnn_R_50_C4_1x.yaml": "137257644/model_final_721ade.pkl", + "COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml": "137847829/model_final_51d356.pkl", + "COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml": "137257794/model_final_b275ba.pkl", + "COCO-Detection/faster_rcnn_R_50_C4_3x.yaml": "137849393/model_final_f97cb7.pkl", + "COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml": "137849425/model_final_68d202.pkl", + "COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml": "137849458/model_final_280758.pkl", + "COCO-Detection/faster_rcnn_R_101_C4_3x.yaml": "138204752/model_final_298dad.pkl", + "COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml": "138204841/model_final_3e0943.pkl", + "COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml": "137851257/model_final_f6e8b1.pkl", + "COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml": "139173657/model_final_68b088.pkl", + # COCO Detection with RetinaNet + "COCO-Detection/retinanet_R_50_FPN_1x.yaml": "137593951/model_final_b796dc.pkl", + "COCO-Detection/retinanet_R_50_FPN_3x.yaml": "137849486/model_final_4cafe0.pkl", + "COCO-Detection/retinanet_R_101_FPN_3x.yaml": "138363263/model_final_59f53c.pkl", + # COCO Detection with RPN and Fast R-CNN + "COCO-Detection/rpn_R_50_C4_1x.yaml": "137258005/model_final_450694.pkl", + "COCO-Detection/rpn_R_50_FPN_1x.yaml": "137258492/model_final_02ce48.pkl", + "COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml": "137635226/model_final_e5f7ce.pkl", + # COCO Instance Segmentation Baselines with Mask R-CNN + "COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml": "137259246/model_final_9243eb.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml": "137260150/model_final_4f86c3.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml": "137260431/model_final_a54504.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml": "137849525/model_final_4ce675.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml": "137849551/model_final_84107b.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml": "137849600/model_final_f10217.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml": "138363239/model_final_a2914c.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml": "138363294/model_final_0464b7.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml": "138205316/model_final_a3ec72.pkl", + "COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml": "139653917/model_final_2d9806.pkl", # noqa + # COCO Person Keypoint Detection Baselines with Keypoint R-CNN + "COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml": "137261548/model_final_04e291.pkl", + "COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml": "137849621/model_final_a6e10b.pkl", + "COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml": "138363331/model_final_997cc7.pkl", + "COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml": "139686956/model_final_5ad38f.pkl", + # COCO Panoptic Segmentation Baselines with Panoptic FPN + "COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml": "139514544/model_final_dbfeb4.pkl", + "COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml": "139514569/model_final_c10459.pkl", + "COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml": "139514519/model_final_cafdb1.pkl", + # LVIS Instance Segmentation Baselines with Mask R-CNN + "LVIS-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml": "144219072/model_final_571f7c.pkl", + 
"LVIS-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml": "144219035/model_final_824ab5.pkl", + "LVIS-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml": "144219108/model_final_5e3439.pkl", # noqa + # Cityscapes & Pascal VOC Baselines + "Cityscapes/mask_rcnn_R_50_FPN.yaml": "142423278/model_final_af9cf5.pkl", + "PascalVOC-Detection/faster_rcnn_R_50_C4.yaml": "142202221/model_final_b1acc2.pkl", + # Other Settings + "Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml": "138602867/model_final_65c703.pkl", + "Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml": "144998336/model_final_821d0b.pkl", + "Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml": "138602847/model_final_e9d89b.pkl", + "Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml": "144998488/model_final_480dd8.pkl", + "Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml": "169527823/model_final_3b3c51.pkl", + "Misc/mask_rcnn_R_50_FPN_3x_gn.yaml": "138602888/model_final_dc5d9e.pkl", + "Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml": "138602908/model_final_01ca85.pkl", + "Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml": "139797668/model_final_be35db.pkl", + "Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml": "18131413/model_0039999_e76410.pkl", # noqa + # D1 Comparisons + "Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml": "137781054/model_final_7ab50c.pkl", # noqa + "Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml": "137781281/model_final_62ca52.pkl", # noqa + "Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml": "137781195/model_final_cce136.pkl", + } + + +def get_checkpoint_url(config_path): + """ + Returns the URL to the model trained using the given config + + Args: + config_path (str): config file name relative to detectron2's "configs/" + directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + + Returns: + str: a URL to the model + """ + name = config_path.replace(".yaml", "") + if config_path in _ModelZooUrls.CONFIG_PATH_TO_URL_SUFFIX: + suffix = _ModelZooUrls.CONFIG_PATH_TO_URL_SUFFIX[config_path] + return _ModelZooUrls.S3_PREFIX + name + "/" + suffix + raise RuntimeError("{} not available in Model Zoo!".format(name)) + + +def get_config_file(config_path): + """ + Returns path to a builtin config file. + + Args: + config_path (str): config file name relative to detectron2's "configs/" + directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + + Returns: + str: the real path to the config file. + """ + cfg_file = pkg_resources.resource_filename( + "detectron2.model_zoo", os.path.join("configs", config_path) + ) + if not os.path.exists(cfg_file): + raise RuntimeError("{} not available in Model Zoo!".format(config_path)) + return cfg_file + + +def get(config_path, trained: bool = False): + """ + Get a model specified by relative path under Detectron2's official ``configs/`` directory. + + Args: + config_path (str): config file name relative to detectron2's "configs/" + directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + trained (bool): If True, will initialize the model with the trained model zoo weights. + If False, the checkpoint specified in the config file's ``MODEL.WEIGHTS`` is used + instead; this will typically (though not always) initialize a subset of weights using + an ImageNet pre-trained model, while randomly initializing the other weights. + + Example: + + .. 
code-block:: python + + from detectron2 import model_zoo + model = model_zoo.get("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml", trained=True) + """ + cfg_file = get_config_file(config_path) + + cfg = get_cfg() + cfg.merge_from_file(cfg_file) + if trained: + cfg.MODEL.WEIGHTS = get_checkpoint_url(config_path) + if not torch.cuda.is_available(): + cfg.MODEL.DEVICE = "cpu" + + model = build_model(cfg) + DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS) + return model diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9e23fe4a7037c8ece8f4c553b4cfda1631b79c9c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/__init__.py @@ -0,0 +1,56 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import torch + +from detectron2.layers import ShapeSpec + +from .anchor_generator import build_anchor_generator, ANCHOR_GENERATOR_REGISTRY +from .backbone import ( + BACKBONE_REGISTRY, + FPN, + Backbone, + ResNet, + ResNetBlockBase, + build_backbone, + build_resnet_backbone, + make_stage, +) +from .meta_arch import ( + META_ARCH_REGISTRY, + SEM_SEG_HEADS_REGISTRY, + GeneralizedRCNN, + PanopticFPN, + ProposalNetwork, + RetinaNet, + SemanticSegmentor, + build_model, + build_sem_seg_head, +) +from .postprocessing import detector_postprocess +from .proposal_generator import ( + PROPOSAL_GENERATOR_REGISTRY, + build_proposal_generator, + RPN_HEAD_REGISTRY, + build_rpn_head, +) +from .roi_heads import ( + ROI_BOX_HEAD_REGISTRY, + ROI_HEADS_REGISTRY, + ROI_KEYPOINT_HEAD_REGISTRY, + ROI_MASK_HEAD_REGISTRY, + ROIHeads, + StandardROIHeads, + BaseMaskRCNNHead, + BaseKeypointRCNNHead, + build_box_head, + build_keypoint_head, + build_mask_head, + build_roi_heads, +) +from .test_time_augmentation import DatasetMapperTTA, GeneralizedRCNNWithTTA + +_EXCLUDE = {"torch", "ShapeSpec"} +__all__ = [k for k in globals().keys() if k not in _EXCLUDE and not k.startswith("_")] + +assert ( + torch.Tensor([1]) == torch.Tensor([2]) +).dtype == torch.bool, "Your Pytorch is too old. Please update to contain https://github.com/pytorch/pytorch/pull/21113" diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/anchor_generator.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/anchor_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..93927bc1c16106710bc1ca1da4d186f7710e1606 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/anchor_generator.py @@ -0,0 +1,382 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import math +from typing import List +import torch +from torch import nn + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec +from detectron2.structures import Boxes, RotatedBoxes +from detectron2.utils.registry import Registry + +ANCHOR_GENERATOR_REGISTRY = Registry("ANCHOR_GENERATOR") +ANCHOR_GENERATOR_REGISTRY.__doc__ = """ +Registry for modules that creates object detection anchors for feature maps. + +The registered object will be called with `obj(cfg, input_shape)`. 
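[Editor's illustration] Since the registry docstring above states that a registered object is called as `obj(cfg, input_shape)`, a hedged sketch of what registering a custom anchor generator looks like (the class name is made up; only the constructor signature matters):

```python
import torch.nn as nn
from detectron2.modeling.anchor_generator import ANCHOR_GENERATOR_REGISTRY

@ANCHOR_GENERATOR_REGISTRY.register()
class MyAnchorGenerator(nn.Module):  # hypothetical example class
    def __init__(self, cfg, input_shape):
        # the registry hands over the config node and the per-level input shapes
        super().__init__()
        self.sizes = cfg.MODEL.ANCHOR_GENERATOR.SIZES

    def forward(self, features):
        raise NotImplementedError("toy example")

# It can then be selected with cfg.MODEL.ANCHOR_GENERATOR.NAME = "MyAnchorGenerator".
```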
+""" + + +class BufferList(nn.Module): + """ + Similar to nn.ParameterList, but for buffers + """ + + def __init__(self, buffers=None): + super(BufferList, self).__init__() + if buffers is not None: + self.extend(buffers) + + def extend(self, buffers): + offset = len(self) + for i, buffer in enumerate(buffers): + self.register_buffer(str(offset + i), buffer) + return self + + def __len__(self): + return len(self._buffers) + + def __iter__(self): + return iter(self._buffers.values()) + + +def _create_grid_offsets(size: List[int], stride: int, offset: float, device: torch.device): + grid_height, grid_width = size + shifts_x = torch.arange( + offset * stride, grid_width * stride, step=stride, dtype=torch.float32, device=device + ) + shifts_y = torch.arange( + offset * stride, grid_height * stride, step=stride, dtype=torch.float32, device=device + ) + + shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) + shift_x = shift_x.reshape(-1) + shift_y = shift_y.reshape(-1) + return shift_x, shift_y + + +def _broadcast_params(params, num_features, name): + """ + If one size (or aspect ratio) is specified and there are multiple feature + maps, we "broadcast" anchors of that single size (or aspect ratio) + over all feature maps. + + If params is list[float], or list[list[float]] with len(params) == 1, repeat + it num_features time. + + Returns: + list[list[float]]: param for each feature + """ + assert isinstance( + params, (list, tuple) + ), f"{name} in anchor generator has to be a list! Got {params}." + assert len(params), f"{name} in anchor generator cannot be empty!" + if not isinstance(params[0], (list, tuple)): # list[float] + return [params] * num_features + if len(params) == 1: + return list(params) * num_features + assert len(params) == num_features, ( + f"Got {name} of length {len(params)} in anchor generator, " + f"but the number of input features is {num_features}!" + ) + return params + + +@ANCHOR_GENERATOR_REGISTRY.register() +class DefaultAnchorGenerator(nn.Module): + """ + Compute anchors in the standard ways described in + "Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks". + """ + + box_dim: int = 4 + """ + the dimension of each anchor box. + """ + + @configurable + def __init__(self, *, sizes, aspect_ratios, strides, offset=0.5): + """ + This interface is experimental. + + Args: + sizes (list[list[float]] or list[float]): + If sizes is list[list[float]], sizes[i] is the list of anchor sizes + (i.e. sqrt of anchor area) to use for the i-th feature map. + If sizes is list[float], the sizes are used for all feature maps. + Anchor sizes are given in absolute lengths in units of + the input image; they do not dynamically scale if the input image size changes. + aspect_ratios (list[list[float]] or list[float]): list of aspect ratios + (i.e. height / width) to use for anchors. Same "broadcast" rule for `sizes` applies. + strides (list[int]): stride of each input feature. + offset (float): Relative offset between the center of the first anchor and the top-left + corner of the image. Value has to be in [0, 1). + Recommend to use 0.5, which means half stride. 
+ """ + super().__init__() + + self.strides = strides + self.num_features = len(self.strides) + sizes = _broadcast_params(sizes, self.num_features, "sizes") + aspect_ratios = _broadcast_params(aspect_ratios, self.num_features, "aspect_ratios") + self.cell_anchors = self._calculate_anchors(sizes, aspect_ratios) + + self.offset = offset + assert 0.0 <= self.offset < 1.0, self.offset + + @classmethod + def from_config(cls, cfg, input_shape: List[ShapeSpec]): + return { + "sizes": cfg.MODEL.ANCHOR_GENERATOR.SIZES, + "aspect_ratios": cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS, + "strides": [x.stride for x in input_shape], + "offset": cfg.MODEL.ANCHOR_GENERATOR.OFFSET, + } + + def _calculate_anchors(self, sizes, aspect_ratios): + cell_anchors = [ + self.generate_cell_anchors(s, a).float() for s, a in zip(sizes, aspect_ratios) + ] + return BufferList(cell_anchors) + + @property + def num_cell_anchors(self): + """ + Alias of `num_anchors`. + """ + return self.num_anchors + + @property + def num_anchors(self): + """ + Returns: + list[int]: Each int is the number of anchors at every pixel + location, on that feature map. + For example, if at every pixel we use anchors of 3 aspect + ratios and 5 sizes, the number of anchors is 15. + (See also ANCHOR_GENERATOR.SIZES and ANCHOR_GENERATOR.ASPECT_RATIOS in config) + + In standard RPN models, `num_anchors` on every feature map is the same. + """ + return [len(cell_anchors) for cell_anchors in self.cell_anchors] + + def _grid_anchors(self, grid_sizes: List[List[int]]): + """ + Returns: + list[Tensor]: #featuremap tensors, each is (#locations x #cell_anchors) x 4 + """ + anchors = [] + for size, stride, base_anchors in zip(grid_sizes, self.strides, self.cell_anchors): + shift_x, shift_y = _create_grid_offsets(size, stride, self.offset, base_anchors.device) + shifts = torch.stack((shift_x, shift_y, shift_x, shift_y), dim=1) + + anchors.append((shifts.view(-1, 1, 4) + base_anchors.view(1, -1, 4)).reshape(-1, 4)) + + return anchors + + def generate_cell_anchors(self, sizes=(32, 64, 128, 256, 512), aspect_ratios=(0.5, 1, 2)): + """ + Generate a tensor storing canonical anchor boxes, which are all anchor + boxes of different sizes and aspect_ratios centered at (0, 0). + We can later build the set of anchors for a full feature map by + shifting and tiling these tensors (see `meth:_grid_anchors`). + + Args: + sizes (tuple[float]): + aspect_ratios (tuple[float]]): + + Returns: + Tensor of shape (len(sizes) * len(aspect_ratios), 4) storing anchor boxes + in XYXY format. + """ + + # This is different from the anchor generator defined in the original Faster R-CNN + # code or Detectron. They yield the same AP, however the old version defines cell + # anchors in a less natural way with a shift relative to the feature grid and + # quantization that results in slightly different sizes for different aspect ratios. + # See also https://github.com/facebookresearch/Detectron/issues/227 + + anchors = [] + for size in sizes: + area = size ** 2.0 + for aspect_ratio in aspect_ratios: + # s * s = w * h + # a = h / w + # ... some algebra ... + # w = sqrt(s * s / a) + # h = a * w + w = math.sqrt(area / aspect_ratio) + h = aspect_ratio * w + x0, y0, x1, y1 = -w / 2.0, -h / 2.0, w / 2.0, h / 2.0 + anchors.append([x0, y0, x1, y1]) + return torch.tensor(anchors) + + def forward(self, features): + """ + Args: + features (list[Tensor]): list of backbone feature maps on which to generate anchors. 
+ + Returns: + list[Boxes]: a list of Boxes containing all the anchors for each feature map + (i.e. the cell anchors repeated over all locations in the feature map). + The number of anchors of each feature map is Hi x Wi x num_cell_anchors, + where Hi, Wi are resolution of the feature map divided by anchor stride. + """ + grid_sizes = [feature_map.shape[-2:] for feature_map in features] + anchors_over_all_feature_maps = self._grid_anchors(grid_sizes) + return [Boxes(x) for x in anchors_over_all_feature_maps] + + +@ANCHOR_GENERATOR_REGISTRY.register() +class RotatedAnchorGenerator(nn.Module): + """ + Compute rotated anchors used by Rotated RPN (RRPN), described in + "Arbitrary-Oriented Scene Text Detection via Rotation Proposals". + """ + + box_dim: int = 5 + """ + the dimension of each anchor box. + """ + + @configurable + def __init__(self, *, sizes, aspect_ratios, strides, angles, offset=0.5): + """ + This interface is experimental. + + Args: + sizes (list[list[float]] or list[float]): + If sizes is list[list[float]], sizes[i] is the list of anchor sizes + (i.e. sqrt of anchor area) to use for the i-th feature map. + If sizes is list[float], the sizes are used for all feature maps. + Anchor sizes are given in absolute lengths in units of + the input image; they do not dynamically scale if the input image size changes. + aspect_ratios (list[list[float]] or list[float]): list of aspect ratios + (i.e. height / width) to use for anchors. Same "broadcast" rule for `sizes` applies. + strides (list[int]): stride of each input feature. + angles (list[list[float]] or list[float]): list of angles (in degrees CCW) + to use for anchors. Same "broadcast" rule for `sizes` applies. + offset (float): Relative offset between the center of the first anchor and the top-left + corner of the image. Value has to be in [0, 1). + Recommend to use 0.5, which means half stride. + """ + super().__init__() + + self.strides = strides + self.num_features = len(self.strides) + sizes = _broadcast_params(sizes, self.num_features, "sizes") + aspect_ratios = _broadcast_params(aspect_ratios, self.num_features, "aspect_ratios") + angles = _broadcast_params(angles, self.num_features, "angles") + self.cell_anchors = self._calculate_anchors(sizes, aspect_ratios, angles) + + self.offset = offset + assert 0.0 <= self.offset < 1.0, self.offset + + @classmethod + def from_config(cls, cfg, input_shape: List[ShapeSpec]): + return { + "sizes": cfg.MODEL.ANCHOR_GENERATOR.SIZES, + "aspect_ratios": cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS, + "strides": [x.stride for x in input_shape], + "offset": cfg.MODEL.ANCHOR_GENERATOR.OFFSET, + "angles": cfg.MODEL.ANCHOR_GENERATOR.ANGLES, + } + + def _calculate_anchors(self, sizes, aspect_ratios, angles): + cell_anchors = [ + self.generate_cell_anchors(size, aspect_ratio, angle).float() + for size, aspect_ratio, angle in zip(sizes, aspect_ratios, angles) + ] + return BufferList(cell_anchors) + + @property + def num_cell_anchors(self): + """ + Alias of `num_anchors`. + """ + return self.num_anchors + + @property + def num_anchors(self): + """ + Returns: + list[int]: Each int is the number of anchors at every pixel + location, on that feature map. + For example, if at every pixel we use anchors of 3 aspect + ratios, 2 sizes and 5 angles, the number of anchors is 30. + (See also ANCHOR_GENERATOR.SIZES, ANCHOR_GENERATOR.ASPECT_RATIOS + and ANCHOR_GENERATOR.ANGLES in config) + + In standard RRPN models, `num_anchors` on every feature map is the same. 
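[Editor's illustration] A back-of-envelope check of the anchor counts stated in `forward` above (Hi x Wi x num_cell_anchors per feature map), using hypothetical FPN map sizes and 3 cell anchors per location:

```python
feature_map_sizes = [(200, 304), (100, 152), (50, 76)]  # (Hi, Wi), illustrative values
num_cell_anchors = 3                                    # e.g. one size, three aspect ratios
print([h * w * num_cell_anchors for h, w in feature_map_sizes])
# [182400, 45600, 11400]
```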
+ """ + return [len(cell_anchors) for cell_anchors in self.cell_anchors] + + def _grid_anchors(self, grid_sizes): + anchors = [] + for size, stride, base_anchors in zip(grid_sizes, self.strides, self.cell_anchors): + shift_x, shift_y = _create_grid_offsets(size, stride, self.offset, base_anchors.device) + zeros = torch.zeros_like(shift_x) + shifts = torch.stack((shift_x, shift_y, zeros, zeros, zeros), dim=1) + + anchors.append((shifts.view(-1, 1, 5) + base_anchors.view(1, -1, 5)).reshape(-1, 5)) + + return anchors + + def generate_cell_anchors( + self, + sizes=(32, 64, 128, 256, 512), + aspect_ratios=(0.5, 1, 2), + angles=(-90, -60, -30, 0, 30, 60, 90), + ): + """ + Generate a tensor storing canonical anchor boxes, which are all anchor + boxes of different sizes, aspect_ratios, angles centered at (0, 0). + We can later build the set of anchors for a full feature map by + shifting and tiling these tensors (see `meth:_grid_anchors`). + + Args: + sizes (tuple[float]): + aspect_ratios (tuple[float]]): + angles (tuple[float]]): + + Returns: + Tensor of shape (len(sizes) * len(aspect_ratios) * len(angles), 5) + storing anchor boxes in (x_ctr, y_ctr, w, h, angle) format. + """ + anchors = [] + for size in sizes: + area = size ** 2.0 + for aspect_ratio in aspect_ratios: + # s * s = w * h + # a = h / w + # ... some algebra ... + # w = sqrt(s * s / a) + # h = a * w + w = math.sqrt(area / aspect_ratio) + h = aspect_ratio * w + anchors.extend([0, 0, w, h, a] for a in angles) + + return torch.tensor(anchors) + + def forward(self, features): + """ + Args: + features (list[Tensor]): list of backbone feature maps on which to generate anchors. + + Returns: + list[RotatedBoxes]: a list of Boxes containing all the anchors for each feature map + (i.e. the cell anchors repeated over all locations in the feature map). + The number of anchors of each feature map is Hi x Wi x num_cell_anchors, + where Hi, Wi are resolution of the feature map divided by anchor stride. + """ + grid_sizes = [feature_map.shape[-2:] for feature_map in features] + anchors_over_all_feature_maps = self._grid_anchors(grid_sizes) + return [RotatedBoxes(x) for x in anchors_over_all_feature_maps] + + +def build_anchor_generator(cfg, input_shape): + """ + Built an anchor generator from `cfg.MODEL.ANCHOR_GENERATOR.NAME`. + """ + anchor_generator = cfg.MODEL.ANCHOR_GENERATOR.NAME + return ANCHOR_GENERATOR_REGISTRY.get(anchor_generator)(cfg, input_shape) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d477fb1e596f77b4c24f2b2c66b528bf2f83b00e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from .build import build_backbone, BACKBONE_REGISTRY # noqa F401 isort:skip + +from .backbone import Backbone +from .fpn import FPN +from .resnet import ResNet, ResNetBlockBase, build_resnet_backbone, make_stage + +__all__ = [k for k in globals().keys() if not k.startswith("_")] +# TODO can expose more resnet blocks after careful consideration diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/backbone.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/backbone.py new file mode 100644 index 0000000000000000000000000000000000000000..66dee4a6565e6c45ed17d0880fcc37eac8f75c3a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/backbone.py @@ -0,0 +1,53 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from abc import ABCMeta, abstractmethod +import torch.nn as nn + +from detectron2.layers import ShapeSpec + +__all__ = ["Backbone"] + + +class Backbone(nn.Module, metaclass=ABCMeta): + """ + Abstract base class for network backbones. + """ + + def __init__(self): + """ + The `__init__` method of any subclass can specify its own set of arguments. + """ + super().__init__() + + @abstractmethod + def forward(self): + """ + Subclasses must override this method, but adhere to the same return type. + + Returns: + dict[str->Tensor]: mapping from feature name (e.g., "res2") to tensor + """ + pass + + @property + def size_divisibility(self): + """ + Some backbones require the input height and width to be divisible by a + specific integer. This is typically true for encoder / decoder type networks + with lateral connection (e.g., FPN) for which feature maps need to match + dimension in the "bottom up" and "top down" paths. Set to 0 if no specific + input size divisibility is required. + """ + return 0 + + def output_shape(self): + """ + Returns: + dict[str->ShapeSpec] + """ + # this is a backward-compatible default + return { + name: ShapeSpec( + channels=self._out_feature_channels[name], stride=self._out_feature_strides[name] + ) + for name in self._out_features + } diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/build.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/build.py new file mode 100644 index 0000000000000000000000000000000000000000..3d2ecae783257418708b572e298a23e167dabb26 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/build.py @@ -0,0 +1,33 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from detectron2.layers import ShapeSpec +from detectron2.utils.registry import Registry + +from .backbone import Backbone + +BACKBONE_REGISTRY = Registry("BACKBONE") +BACKBONE_REGISTRY.__doc__ = """ +Registry for backbones, which extract feature maps from images + +The registered object must be a callable that accepts two arguments: + +1. A :class:`detectron2.config.CfgNode` +2. A :class:`detectron2.layers.ShapeSpec`, which contains the input shape specification. + +It must returns an instance of :class:`Backbone`. +""" + + +def build_backbone(cfg, input_shape=None): + """ + Build a backbone from `cfg.MODEL.BACKBONE.NAME`. 
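[Editor's illustration] A hedged sketch (class and feature names are made up) of the minimal contract a registered backbone has to satisfy so that `build_backbone` below can construct it and downstream code can query `output_shape()`:

```python
import torch.nn as nn
from detectron2.layers import ShapeSpec
from detectron2.modeling import BACKBONE_REGISTRY, Backbone

@BACKBONE_REGISTRY.register()
class ToyBackbone(Backbone):
    def __init__(self, cfg, input_shape: ShapeSpec):
        super().__init__()
        # one conv that downsamples by 4; real backbones expose several levels
        self.conv = nn.Conv2d(input_shape.channels, 64, kernel_size=7, stride=4, padding=3)
        self._out_features = ["toy"]
        self._out_feature_channels = {"toy": 64}
        self._out_feature_strides = {"toy": 4}

    def forward(self, image):
        return {"toy": self.conv(image)}

# Selecting it only requires cfg.MODEL.BACKBONE.NAME = "ToyBackbone".
```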
+ + Returns: + an instance of :class:`Backbone` + """ + if input_shape is None: + input_shape = ShapeSpec(channels=len(cfg.MODEL.PIXEL_MEAN)) + + backbone_name = cfg.MODEL.BACKBONE.NAME + backbone = BACKBONE_REGISTRY.get(backbone_name)(cfg, input_shape) + assert isinstance(backbone, Backbone) + return backbone diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/fpn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..338b5f5286ce233f17aa41f50a5a0a8fb819b8d3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/fpn.py @@ -0,0 +1,245 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import math +import fvcore.nn.weight_init as weight_init +import torch.nn.functional as F +from torch import nn + +from detectron2.layers import Conv2d, ShapeSpec, get_norm + +from .backbone import Backbone +from .build import BACKBONE_REGISTRY +from .resnet import build_resnet_backbone + +__all__ = ["build_resnet_fpn_backbone", "build_retinanet_resnet_fpn_backbone", "FPN"] + + +class FPN(Backbone): + """ + This module implements :paper:`FPN`. + It creates pyramid features built on top of some input feature maps. + """ + + def __init__( + self, bottom_up, in_features, out_channels, norm="", top_block=None, fuse_type="sum" + ): + """ + Args: + bottom_up (Backbone): module representing the bottom up subnetwork. + Must be a subclass of :class:`Backbone`. The multi-scale feature + maps generated by the bottom up network, and listed in `in_features`, + are used to generate FPN levels. + in_features (list[str]): names of the input feature maps coming + from the backbone to which FPN is attached. For example, if the + backbone produces ["res2", "res3", "res4"], any *contiguous* sublist + of these may be used; order must be from high to low resolution. + out_channels (int): number of channels in the output feature maps. + norm (str): the normalization to use. + top_block (nn.Module or None): if provided, an extra operation will + be performed on the output of the last (smallest resolution) + FPN output, and the result will extend the result list. The top_block + further downsamples the feature map. It must have an attribute + "num_levels", meaning the number of extra FPN levels added by + this block, and "in_feature", which is a string representing + its input feature (e.g., p5). + fuse_type (str): types for fusing the top down features and the lateral + ones. It can be "sum" (default), which sums up element-wise; or "avg", + which takes the element-wise mean of the two. + """ + super(FPN, self).__init__() + assert isinstance(bottom_up, Backbone) + + # Feature map strides and channels from the bottom up network (e.g. 
ResNet) + input_shapes = bottom_up.output_shape() + in_strides = [input_shapes[f].stride for f in in_features] + in_channels = [input_shapes[f].channels for f in in_features] + + _assert_strides_are_log2_contiguous(in_strides) + lateral_convs = [] + output_convs = [] + + use_bias = norm == "" + for idx, in_channels in enumerate(in_channels): + lateral_norm = get_norm(norm, out_channels) + output_norm = get_norm(norm, out_channels) + + lateral_conv = Conv2d( + in_channels, out_channels, kernel_size=1, bias=use_bias, norm=lateral_norm + ) + output_conv = Conv2d( + out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + bias=use_bias, + norm=output_norm, + ) + weight_init.c2_xavier_fill(lateral_conv) + weight_init.c2_xavier_fill(output_conv) + stage = int(math.log2(in_strides[idx])) + self.add_module("fpn_lateral{}".format(stage), lateral_conv) + self.add_module("fpn_output{}".format(stage), output_conv) + + lateral_convs.append(lateral_conv) + output_convs.append(output_conv) + # Place convs into top-down order (from low to high resolution) + # to make the top-down computation in forward clearer. + self.lateral_convs = lateral_convs[::-1] + self.output_convs = output_convs[::-1] + self.top_block = top_block + self.in_features = in_features + self.bottom_up = bottom_up + # Return feature names are "p", like ["p2", "p3", ..., "p6"] + self._out_feature_strides = {"p{}".format(int(math.log2(s))): s for s in in_strides} + # top block output feature maps. + if self.top_block is not None: + for s in range(stage, stage + self.top_block.num_levels): + self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1) + + self._out_features = list(self._out_feature_strides.keys()) + self._out_feature_channels = {k: out_channels for k in self._out_features} + self._size_divisibility = in_strides[-1] + assert fuse_type in {"avg", "sum"} + self._fuse_type = fuse_type + + @property + def size_divisibility(self): + return self._size_divisibility + + def forward(self, x): + """ + Args: + input (dict[str->Tensor]): mapping feature map name (e.g., "res5") to + feature map tensor for each feature level in high to low resolution order. + + Returns: + dict[str->Tensor]: + mapping from feature map name to FPN feature map tensor + in high to low resolution order. Returned feature names follow the FPN + paper convention: "p", where stage has stride = 2 ** stage e.g., + ["p2", "p3", ..., "p6"]. 
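[Editor's illustration] The FPN output names constructed above follow directly from the bottom-up strides as `"p{log2(stride)}"`; a quick check with typical ResNet strides:

```python
import math

in_strides = [4, 8, 16, 32]  # res2..res5 in a typical ResNet
print(["p{}".format(int(math.log2(s))) for s in in_strides])  # ['p2', 'p3', 'p4', 'p5']
# A LastLevelMaxPool top block would then append 'p6' with stride 64.
```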
+ """ + # Reverse feature maps into top-down order (from low to high resolution) + bottom_up_features = self.bottom_up(x) + x = [bottom_up_features[f] for f in self.in_features[::-1]] + results = [] + prev_features = self.lateral_convs[0](x[0]) + results.append(self.output_convs[0](prev_features)) + for features, lateral_conv, output_conv in zip( + x[1:], self.lateral_convs[1:], self.output_convs[1:] + ): + top_down_features = F.interpolate(prev_features, scale_factor=2, mode="nearest") + lateral_features = lateral_conv(features) + prev_features = lateral_features + top_down_features + if self._fuse_type == "avg": + prev_features /= 2 + results.insert(0, output_conv(prev_features)) + + if self.top_block is not None: + top_block_in_feature = bottom_up_features.get(self.top_block.in_feature, None) + if top_block_in_feature is None: + top_block_in_feature = results[self._out_features.index(self.top_block.in_feature)] + results.extend(self.top_block(top_block_in_feature)) + assert len(self._out_features) == len(results) + return dict(zip(self._out_features, results)) + + def output_shape(self): + return { + name: ShapeSpec( + channels=self._out_feature_channels[name], stride=self._out_feature_strides[name] + ) + for name in self._out_features + } + + +def _assert_strides_are_log2_contiguous(strides): + """ + Assert that each stride is 2x times its preceding stride, i.e. "contiguous in log2". + """ + for i, stride in enumerate(strides[1:], 1): + assert stride == 2 * strides[i - 1], "Strides {} {} are not log2 contiguous".format( + stride, strides[i - 1] + ) + + +class LastLevelMaxPool(nn.Module): + """ + This module is used in the original FPN to generate a downsampled + P6 feature from P5. + """ + + def __init__(self): + super().__init__() + self.num_levels = 1 + self.in_feature = "p5" + + def forward(self, x): + return [F.max_pool2d(x, kernel_size=1, stride=2, padding=0)] + + +class LastLevelP6P7(nn.Module): + """ + This module is used in RetinaNet to generate extra layers, P6 and P7 from + C5 feature. + """ + + def __init__(self, in_channels, out_channels, in_feature="res5"): + super().__init__() + self.num_levels = 2 + self.in_feature = in_feature + self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1) + self.p7 = nn.Conv2d(out_channels, out_channels, 3, 2, 1) + for module in [self.p6, self.p7]: + weight_init.c2_xavier_fill(module) + + def forward(self, c5): + p6 = self.p6(c5) + p7 = self.p7(F.relu(p6)) + return [p6, p7] + + +@BACKBONE_REGISTRY.register() +def build_resnet_fpn_backbone(cfg, input_shape: ShapeSpec): + """ + Args: + cfg: a detectron2 CfgNode + + Returns: + backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`. + """ + bottom_up = build_resnet_backbone(cfg, input_shape) + in_features = cfg.MODEL.FPN.IN_FEATURES + out_channels = cfg.MODEL.FPN.OUT_CHANNELS + backbone = FPN( + bottom_up=bottom_up, + in_features=in_features, + out_channels=out_channels, + norm=cfg.MODEL.FPN.NORM, + top_block=LastLevelMaxPool(), + fuse_type=cfg.MODEL.FPN.FUSE_TYPE, + ) + return backbone + + +@BACKBONE_REGISTRY.register() +def build_retinanet_resnet_fpn_backbone(cfg, input_shape: ShapeSpec): + """ + Args: + cfg: a detectron2 CfgNode + + Returns: + backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`. 
+ """ + bottom_up = build_resnet_backbone(cfg, input_shape) + in_features = cfg.MODEL.FPN.IN_FEATURES + out_channels = cfg.MODEL.FPN.OUT_CHANNELS + in_channels_p6p7 = bottom_up.output_shape()["res5"].channels + backbone = FPN( + bottom_up=bottom_up, + in_features=in_features, + out_channels=out_channels, + norm=cfg.MODEL.FPN.NORM, + top_block=LastLevelP6P7(in_channels_p6p7, out_channels), + fuse_type=cfg.MODEL.FPN.FUSE_TYPE, + ) + return backbone diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/resnet.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..f1faae012f346166a311902826fb9e4b61e24e54 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/backbone/resnet.py @@ -0,0 +1,591 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +import fvcore.nn.weight_init as weight_init +import torch +import torch.nn.functional as F +from torch import nn + +from detectron2.layers import ( + CNNBlockBase, + Conv2d, + DeformConv, + ModulatedDeformConv, + ShapeSpec, + get_norm, +) + +from .backbone import Backbone +from .build import BACKBONE_REGISTRY + +__all__ = [ + "ResNetBlockBase", + "BasicBlock", + "BottleneckBlock", + "DeformBottleneckBlock", + "BasicStem", + "ResNet", + "make_stage", + "build_resnet_backbone", +] + + +ResNetBlockBase = CNNBlockBase +""" +Alias for backward compatibiltiy. +""" + + +class BasicBlock(CNNBlockBase): + """ + The basic residual block for ResNet-18 and ResNet-34 defined in :paper:`ResNet`, + with two 3x3 conv layers and a projection shortcut if needed. + """ + + def __init__(self, in_channels, out_channels, *, stride=1, norm="BN"): + """ + Args: + in_channels (int): Number of input channels. + out_channels (int): Number of output channels. + stride (int): Stride for the first conv. + norm (str or callable): normalization for all conv layers. + See :func:`layers.get_norm` for supported format. + """ + super().__init__(in_channels, out_channels, stride) + + if in_channels != out_channels: + self.shortcut = Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False, + norm=get_norm(norm, out_channels), + ) + else: + self.shortcut = None + + self.conv1 = Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + self.conv2 = Conv2d( + out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + for layer in [self.conv1, self.conv2, self.shortcut]: + if layer is not None: # shortcut can be None + weight_init.c2_msra_fill(layer) + + def forward(self, x): + out = self.conv1(x) + out = F.relu_(out) + out = self.conv2(out) + + if self.shortcut is not None: + shortcut = self.shortcut(x) + else: + shortcut = x + + out += shortcut + out = F.relu_(out) + return out + + +class BottleneckBlock(CNNBlockBase): + """ + The standard bottleneck residual block used by ResNet-50, 101 and 152 + defined in :paper:`ResNet`. It contains 3 conv layers with kernels + 1x1, 3x3, 1x1, and a projection shortcut if needed. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + *, + bottleneck_channels, + stride=1, + num_groups=1, + norm="BN", + stride_in_1x1=False, + dilation=1, + ): + """ + Args: + bottleneck_channels (int): number of output channels for the 3x3 + "bottleneck" conv layers. + num_groups (int): number of groups for the 3x3 conv layer. + norm (str or callable): normalization for all conv layers. + See :func:`layers.get_norm` for supported format. + stride_in_1x1 (bool): when stride>1, whether to put stride in the + first 1x1 convolution or the bottleneck 3x3 convolution. + dilation (int): the dilation rate of the 3x3 conv layer. + """ + super().__init__(in_channels, out_channels, stride) + + if in_channels != out_channels: + self.shortcut = Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False, + norm=get_norm(norm, out_channels), + ) + else: + self.shortcut = None + + # The original MSRA ResNet models have stride in the first 1x1 conv + # The subsequent fb.torch.resnet and Caffe2 ResNe[X]t implementations have + # stride in the 3x3 conv + stride_1x1, stride_3x3 = (stride, 1) if stride_in_1x1 else (1, stride) + + self.conv1 = Conv2d( + in_channels, + bottleneck_channels, + kernel_size=1, + stride=stride_1x1, + bias=False, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv2 = Conv2d( + bottleneck_channels, + bottleneck_channels, + kernel_size=3, + stride=stride_3x3, + padding=1 * dilation, + bias=False, + groups=num_groups, + dilation=dilation, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv3 = Conv2d( + bottleneck_channels, + out_channels, + kernel_size=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + for layer in [self.conv1, self.conv2, self.conv3, self.shortcut]: + if layer is not None: # shortcut can be None + weight_init.c2_msra_fill(layer) + + # Zero-initialize the last normalization in each residual branch, + # so that at the beginning, the residual branch starts with zeros, + # and each residual block behaves like an identity. + # See Sec 5.1 in "Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour": + # "For BN layers, the learnable scaling coefficient γ is initialized + # to be 1, except for each residual block's last BN + # where γ is initialized to be 0." + + # nn.init.constant_(self.conv3.norm.weight, 0) + # TODO this somehow hurts performance when training GN models from scratch. + # Add it as an option when we need to use this code to train a backbone. + + def forward(self, x): + out = self.conv1(x) + out = F.relu_(out) + + out = self.conv2(out) + out = F.relu_(out) + + out = self.conv3(out) + + if self.shortcut is not None: + shortcut = self.shortcut(x) + else: + shortcut = x + + out += shortcut + out = F.relu_(out) + return out + + +class DeformBottleneckBlock(ResNetBlockBase): + """ + Similar to :class:`BottleneckBlock`, but with :paper:`deformable conv ` + in the 3x3 convolution. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + *, + bottleneck_channels, + stride=1, + num_groups=1, + norm="BN", + stride_in_1x1=False, + dilation=1, + deform_modulated=False, + deform_num_groups=1, + ): + super().__init__(in_channels, out_channels, stride) + self.deform_modulated = deform_modulated + + if in_channels != out_channels: + self.shortcut = Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False, + norm=get_norm(norm, out_channels), + ) + else: + self.shortcut = None + + stride_1x1, stride_3x3 = (stride, 1) if stride_in_1x1 else (1, stride) + + self.conv1 = Conv2d( + in_channels, + bottleneck_channels, + kernel_size=1, + stride=stride_1x1, + bias=False, + norm=get_norm(norm, bottleneck_channels), + ) + + if deform_modulated: + deform_conv_op = ModulatedDeformConv + # offset channels are 2 or 3 (if with modulated) * kernel_size * kernel_size + offset_channels = 27 + else: + deform_conv_op = DeformConv + offset_channels = 18 + + self.conv2_offset = Conv2d( + bottleneck_channels, + offset_channels * deform_num_groups, + kernel_size=3, + stride=stride_3x3, + padding=1 * dilation, + dilation=dilation, + ) + self.conv2 = deform_conv_op( + bottleneck_channels, + bottleneck_channels, + kernel_size=3, + stride=stride_3x3, + padding=1 * dilation, + bias=False, + groups=num_groups, + dilation=dilation, + deformable_groups=deform_num_groups, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv3 = Conv2d( + bottleneck_channels, + out_channels, + kernel_size=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + for layer in [self.conv1, self.conv2, self.conv3, self.shortcut]: + if layer is not None: # shortcut can be None + weight_init.c2_msra_fill(layer) + + nn.init.constant_(self.conv2_offset.weight, 0) + nn.init.constant_(self.conv2_offset.bias, 0) + + def forward(self, x): + out = self.conv1(x) + out = F.relu_(out) + + if self.deform_modulated: + offset_mask = self.conv2_offset(out) + offset_x, offset_y, mask = torch.chunk(offset_mask, 3, dim=1) + offset = torch.cat((offset_x, offset_y), dim=1) + mask = mask.sigmoid() + out = self.conv2(out, offset, mask) + else: + offset = self.conv2_offset(out) + out = self.conv2(out, offset) + out = F.relu_(out) + + out = self.conv3(out) + + if self.shortcut is not None: + shortcut = self.shortcut(x) + else: + shortcut = x + + out += shortcut + out = F.relu_(out) + return out + + +def make_stage(block_class, num_blocks, first_stride, *, in_channels, out_channels, **kwargs): + """ + Create a list of blocks just like those in a ResNet stage. + + Args: + block_class (type): a subclass of ResNetBlockBase + num_blocks (int): + first_stride (int): the stride of the first block. The other blocks will have stride=1. + in_channels (int): input channels of the entire stage. + out_channels (int): output channels of **every block** in the stage. + kwargs: other arguments passed to the constructor of every block. + + Returns: + list[nn.Module]: a list of block module. + """ + assert "stride" not in kwargs, "Stride of blocks in make_stage cannot be changed." + blocks = [] + for i in range(num_blocks): + blocks.append( + block_class( + in_channels=in_channels, + out_channels=out_channels, + stride=first_stride if i == 0 else 1, + **kwargs, + ) + ) + in_channels = out_channels + return blocks + + +class BasicStem(CNNBlockBase): + """ + The standard ResNet stem (layers before the first residual block). 
+ """ + + def __init__(self, in_channels=3, out_channels=64, norm="BN"): + """ + Args: + norm (str or callable): norm after the first conv layer. + See :func:`layers.get_norm` for supported format. + """ + super().__init__(in_channels, out_channels, 4) + self.in_channels = in_channels + self.conv1 = Conv2d( + in_channels, + out_channels, + kernel_size=7, + stride=2, + padding=3, + bias=False, + norm=get_norm(norm, out_channels), + ) + weight_init.c2_msra_fill(self.conv1) + + def forward(self, x): + x = self.conv1(x) + x = F.relu_(x) + x = F.max_pool2d(x, kernel_size=3, stride=2, padding=1) + return x + + +class ResNet(Backbone): + """ + Implement :paper:`ResNet`. + """ + + def __init__(self, stem, stages, num_classes=None, out_features=None): + """ + Args: + stem (nn.Module): a stem module + stages (list[list[CNNBlockBase]]): several (typically 4) stages, + each contains multiple :class:`CNNBlockBase`. + num_classes (None or int): if None, will not perform classification. + Otherwise, will create a linear layer. + out_features (list[str]): name of the layers whose outputs should + be returned in forward. Can be anything in "stem", "linear", or "res2" ... + If None, will return the output of the last layer. + """ + super(ResNet, self).__init__() + self.stem = stem + self.num_classes = num_classes + + current_stride = self.stem.stride + self._out_feature_strides = {"stem": current_stride} + self._out_feature_channels = {"stem": self.stem.out_channels} + + self.stages_and_names = [] + for i, blocks in enumerate(stages): + assert len(blocks) > 0, len(blocks) + for block in blocks: + assert isinstance(block, CNNBlockBase), block + + name = "res" + str(i + 2) + stage = nn.Sequential(*blocks) + + self.add_module(name, stage) + self.stages_and_names.append((stage, name)) + + self._out_feature_strides[name] = current_stride = int( + current_stride * np.prod([k.stride for k in blocks]) + ) + self._out_feature_channels[name] = curr_channels = blocks[-1].out_channels + + if num_classes is not None: + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.linear = nn.Linear(curr_channels, num_classes) + + # Sec 5.1 in "Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour": + # "The 1000-way fully-connected layer is initialized by + # drawing weights from a zero-mean Gaussian with standard deviation of 0.01." + nn.init.normal_(self.linear.weight, std=0.01) + name = "linear" + + if out_features is None: + out_features = [name] + self._out_features = out_features + assert len(self._out_features) + children = [x[0] for x in self.named_children()] + for out_feature in self._out_features: + assert out_feature in children, "Available children: {}".format(", ".join(children)) + + def forward(self, x): + outputs = {} + x = self.stem(x) + if "stem" in self._out_features: + outputs["stem"] = x + for stage, name in self.stages_and_names: + x = stage(x) + if name in self._out_features: + outputs[name] = x + if self.num_classes is not None: + x = self.avgpool(x) + x = torch.flatten(x, 1) + x = self.linear(x) + if "linear" in self._out_features: + outputs["linear"] = x + return outputs + + def output_shape(self): + return { + name: ShapeSpec( + channels=self._out_feature_channels[name], stride=self._out_feature_strides[name] + ) + for name in self._out_features + } + + def freeze(self, freeze_at=0): + """ + Freeze the first several stages of the ResNet. Commonly used in + fine-tuning. + + Layers that produce the same feature map spatial size are defined as one + "stage" by :paper:`FPN`. 
+ + Args: + freeze_at (int): number of stages to freeze. + `1` means freezing the stem. `2` means freezing the stem and + one residual stage, etc. + + Returns: + nn.Module: this ResNet itself + """ + if freeze_at >= 1: + self.stem.freeze() + for idx, (stage, _) in enumerate(self.stages_and_names, start=2): + if freeze_at >= idx: + for block in stage.children(): + block.freeze() + return self + + +@BACKBONE_REGISTRY.register() +def build_resnet_backbone(cfg, input_shape): + """ + Create a ResNet instance from config. + + Returns: + ResNet: a :class:`ResNet` instance. + """ + # need registration of new blocks/stems? + norm = cfg.MODEL.RESNETS.NORM + stem = BasicStem( + in_channels=input_shape.channels, + out_channels=cfg.MODEL.RESNETS.STEM_OUT_CHANNELS, + norm=norm, + ) + + # fmt: off + freeze_at = cfg.MODEL.BACKBONE.FREEZE_AT + out_features = cfg.MODEL.RESNETS.OUT_FEATURES + depth = cfg.MODEL.RESNETS.DEPTH + num_groups = cfg.MODEL.RESNETS.NUM_GROUPS + width_per_group = cfg.MODEL.RESNETS.WIDTH_PER_GROUP + bottleneck_channels = num_groups * width_per_group + in_channels = cfg.MODEL.RESNETS.STEM_OUT_CHANNELS + out_channels = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS + stride_in_1x1 = cfg.MODEL.RESNETS.STRIDE_IN_1X1 + res5_dilation = cfg.MODEL.RESNETS.RES5_DILATION + deform_on_per_stage = cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE + deform_modulated = cfg.MODEL.RESNETS.DEFORM_MODULATED + deform_num_groups = cfg.MODEL.RESNETS.DEFORM_NUM_GROUPS + # fmt: on + assert res5_dilation in {1, 2}, "res5_dilation cannot be {}.".format(res5_dilation) + + num_blocks_per_stage = { + 18: [2, 2, 2, 2], + 34: [3, 4, 6, 3], + 50: [3, 4, 6, 3], + 101: [3, 4, 23, 3], + 152: [3, 8, 36, 3], + }[depth] + + if depth in [18, 34]: + assert out_channels == 64, "Must set MODEL.RESNETS.RES2_OUT_CHANNELS = 64 for R18/R34" + assert not any( + deform_on_per_stage + ), "MODEL.RESNETS.DEFORM_ON_PER_STAGE unsupported for R18/R34" + assert res5_dilation == 1, "Must set MODEL.RESNETS.RES5_DILATION = 1 for R18/R34" + assert num_groups == 1, "Must set MODEL.RESNETS.NUM_GROUPS = 1 for R18/R34" + + stages = [] + + # Avoid creating variables without gradients + # It consumes extra memory and may cause allreduce to fail + out_stage_idx = [{"res2": 2, "res3": 3, "res4": 4, "res5": 5}[f] for f in out_features] + max_stage_idx = max(out_stage_idx) + for idx, stage_idx in enumerate(range(2, max_stage_idx + 1)): + dilation = res5_dilation if stage_idx == 5 else 1 + first_stride = 1 if idx == 0 or (stage_idx == 5 and dilation == 2) else 2 + stage_kargs = { + "num_blocks": num_blocks_per_stage[idx], + "first_stride": first_stride, + "in_channels": in_channels, + "out_channels": out_channels, + "norm": norm, + } + # Use BasicBlock for R18 and R34. 
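[Editor's illustration] A hedged usage sketch of `build_resnet_backbone` above with default config values (ResNet-50, `OUT_FEATURES=["res4"]`, stem and res2 frozen by `FREEZE_AT=2`):

```python
from detectron2.config import get_cfg
from detectron2.layers import ShapeSpec
from detectron2.modeling.backbone.resnet import build_resnet_backbone

cfg = get_cfg()
backbone = build_resnet_backbone(cfg, ShapeSpec(channels=3))
print(backbone.output_shape())  # e.g. {'res4': ShapeSpec(channels=1024, ..., stride=16)}
```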
+ if depth in [18, 34]: + stage_kargs["block_class"] = BasicBlock + else: + stage_kargs["bottleneck_channels"] = bottleneck_channels + stage_kargs["stride_in_1x1"] = stride_in_1x1 + stage_kargs["dilation"] = dilation + stage_kargs["num_groups"] = num_groups + if deform_on_per_stage[idx]: + stage_kargs["block_class"] = DeformBottleneckBlock + stage_kargs["deform_modulated"] = deform_modulated + stage_kargs["deform_num_groups"] = deform_num_groups + else: + stage_kargs["block_class"] = BottleneckBlock + blocks = make_stage(**stage_kargs) + in_channels = out_channels + out_channels *= 2 + bottleneck_channels *= 2 + stages.append(blocks) + return ResNet(stem, stages, out_features=out_features).freeze(freeze_at) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/box_regression.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/box_regression.py new file mode 100644 index 0000000000000000000000000000000000000000..88426fddf36812f33def8fb434bebce53db3a4b4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/box_regression.py @@ -0,0 +1,247 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import math +from typing import Tuple +import torch + +# Value for clamping large dw and dh predictions. The heuristic is that we clamp +# such that dw and dh are no larger than what would transform a 16px box into a +# 1000px box (based on a small anchor, 16px, and a typical image size, 1000px). +_DEFAULT_SCALE_CLAMP = math.log(1000.0 / 16) + + +__all__ = ["Box2BoxTransform", "Box2BoxTransformRotated"] + + +def apply_deltas_broadcast(box2box_transform, deltas, boxes): + """ + Apply transform deltas to boxes. Similar to `box2box_transform.apply_deltas`, + but allow broadcasting boxes when the second dimension of deltas is a multiple + of box dimension. + + Args: + box2box_transform (Box2BoxTransform or Box2BoxTransformRotated): the transform to apply + deltas (Tensor): tensor of shape (N,B) or (N,KxB) + boxes (Tensor): tensor of shape (N,B) + + Returns: + Tensor: same shape as deltas. + """ + assert deltas.dim() == boxes.dim() == 2, f"{deltas.shape}, {boxes.shape}" + N, B = boxes.shape + assert ( + deltas.shape[1] % B == 0 + ), f"Second dim of deltas should be a multiple of {B}. Got {deltas.shape}" + K = deltas.shape[1] // B + ret = box2box_transform.apply_deltas( + deltas.view(N * K, B), boxes.unsqueeze(1).expand(N, K, B).reshape(N * K, B) + ) + return ret.view(N, K * B) + + +@torch.jit.script +class Box2BoxTransform(object): + """ + The box-to-box transform defined in R-CNN. The transformation is parameterized + by 4 deltas: (dx, dy, dw, dh). The transformation scales the box's width and height + by exp(dw), exp(dh) and shifts a box's center by the offset (dx * width, dy * height). + """ + + def __init__( + self, weights: Tuple[float, float, float, float], scale_clamp: float = _DEFAULT_SCALE_CLAMP + ): + """ + Args: + weights (4-element tuple): Scaling factors that are applied to the + (dx, dy, dw, dh) deltas. In Fast R-CNN, these were originally set + such that the deltas have unit variance; now they are treated as + hyperparameters of the system. + scale_clamp (float): When predicting deltas, the predicted box scaling + factors (dw and dh) are clamped such that they are <= scale_clamp. 
+ """ + self.weights = weights + self.scale_clamp = scale_clamp + + def get_deltas(self, src_boxes, target_boxes): + """ + Get box regression transformation deltas (dx, dy, dw, dh) that can be used + to transform the `src_boxes` into the `target_boxes`. That is, the relation + ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless + any delta is too large and is clamped). + + Args: + src_boxes (Tensor): source boxes, e.g., object proposals + target_boxes (Tensor): target of the transformation, e.g., ground-truth + boxes. + """ + assert isinstance(src_boxes, torch.Tensor), type(src_boxes) + assert isinstance(target_boxes, torch.Tensor), type(target_boxes) + + src_widths = src_boxes[:, 2] - src_boxes[:, 0] + src_heights = src_boxes[:, 3] - src_boxes[:, 1] + src_ctr_x = src_boxes[:, 0] + 0.5 * src_widths + src_ctr_y = src_boxes[:, 1] + 0.5 * src_heights + + target_widths = target_boxes[:, 2] - target_boxes[:, 0] + target_heights = target_boxes[:, 3] - target_boxes[:, 1] + target_ctr_x = target_boxes[:, 0] + 0.5 * target_widths + target_ctr_y = target_boxes[:, 1] + 0.5 * target_heights + + wx, wy, ww, wh = self.weights + dx = wx * (target_ctr_x - src_ctr_x) / src_widths + dy = wy * (target_ctr_y - src_ctr_y) / src_heights + dw = ww * torch.log(target_widths / src_widths) + dh = wh * torch.log(target_heights / src_heights) + + deltas = torch.stack((dx, dy, dw, dh), dim=1) + assert (src_widths > 0).all().item(), "Input boxes to Box2BoxTransform are not valid!" + return deltas + + def apply_deltas(self, deltas, boxes): + """ + Apply transformation `deltas` (dx, dy, dw, dh) to `boxes`. + + Args: + deltas (Tensor): transformation deltas of shape (N, k*4), where k >= 1. + deltas[i] represents k potentially different class-specific + box transformations for the single box boxes[i]. + boxes (Tensor): boxes to transform, of shape (N, 4) + """ + boxes = boxes.to(deltas.dtype) + + widths = boxes[:, 2] - boxes[:, 0] + heights = boxes[:, 3] - boxes[:, 1] + ctr_x = boxes[:, 0] + 0.5 * widths + ctr_y = boxes[:, 1] + 0.5 * heights + + wx, wy, ww, wh = self.weights + dx = deltas[:, 0::4] / wx + dy = deltas[:, 1::4] / wy + dw = deltas[:, 2::4] / ww + dh = deltas[:, 3::4] / wh + + # Prevent sending too large values into torch.exp() + dw = torch.clamp(dw, max=self.scale_clamp) + dh = torch.clamp(dh, max=self.scale_clamp) + + pred_ctr_x = dx * widths[:, None] + ctr_x[:, None] + pred_ctr_y = dy * heights[:, None] + ctr_y[:, None] + pred_w = torch.exp(dw) * widths[:, None] + pred_h = torch.exp(dh) * heights[:, None] + + pred_boxes = torch.zeros_like(deltas) + pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w # x1 + pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h # y1 + pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w # x2 + pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h # y2 + return pred_boxes + + +@torch.jit.script +class Box2BoxTransformRotated(object): + """ + The box-to-box transform defined in Rotated R-CNN. The transformation is parameterized + by 5 deltas: (dx, dy, dw, dh, da). The transformation scales the box's width and height + by exp(dw), exp(dh), shifts a box's center by the offset (dx * width, dy * height), + and rotate a box's angle by da (radians). + Note: angles of deltas are in radians while angles of boxes are in degrees. + """ + + def __init__( + self, + weights: Tuple[float, float, float, float, float], + scale_clamp: float = _DEFAULT_SCALE_CLAMP, + ): + """ + Args: + weights (5-element tuple): Scaling factors that are applied to the + (dx, dy, dw, dh, da) deltas. 
These are treated as + hyperparameters of the system. + scale_clamp (float): When predicting deltas, the predicted box scaling + factors (dw and dh) are clamped such that they are <= scale_clamp. + """ + self.weights = weights + self.scale_clamp = scale_clamp + + def get_deltas(self, src_boxes, target_boxes): + """ + Get box regression transformation deltas (dx, dy, dw, dh, da) that can be used + to transform the `src_boxes` into the `target_boxes`. That is, the relation + ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless + any delta is too large and is clamped). + + Args: + src_boxes (Tensor): Nx5 source boxes, e.g., object proposals + target_boxes (Tensor): Nx5 target of the transformation, e.g., ground-truth + boxes. + """ + assert isinstance(src_boxes, torch.Tensor), type(src_boxes) + assert isinstance(target_boxes, torch.Tensor), type(target_boxes) + + src_ctr_x, src_ctr_y, src_widths, src_heights, src_angles = torch.unbind(src_boxes, dim=1) + + target_ctr_x, target_ctr_y, target_widths, target_heights, target_angles = torch.unbind( + target_boxes, dim=1 + ) + + wx, wy, ww, wh, wa = self.weights + dx = wx * (target_ctr_x - src_ctr_x) / src_widths + dy = wy * (target_ctr_y - src_ctr_y) / src_heights + dw = ww * torch.log(target_widths / src_widths) + dh = wh * torch.log(target_heights / src_heights) + # Angles of deltas are in radians while angles of boxes are in degrees. + # the conversion to radians serve as a way to normalize the values + da = target_angles - src_angles + da = (da + 180.0) % 360.0 - 180.0 # make it in [-180, 180) + da *= wa * math.pi / 180.0 + + deltas = torch.stack((dx, dy, dw, dh, da), dim=1) + assert ( + (src_widths > 0).all().item() + ), "Input boxes to Box2BoxTransformRotated are not valid!" + return deltas + + def apply_deltas(self, deltas, boxes): + """ + Apply transformation `deltas` (dx, dy, dw, dh, da) to `boxes`. + + Args: + deltas (Tensor): transformation deltas of shape (N, 5). + deltas[i] represents box transformation for the single box boxes[i]. + boxes (Tensor): boxes to transform, of shape (N, 5) + """ + assert deltas.shape[1] == 5 and boxes.shape[1] == 5 + + boxes = boxes.to(deltas.dtype) + + ctr_x = boxes[:, 0] + ctr_y = boxes[:, 1] + widths = boxes[:, 2] + heights = boxes[:, 3] + angles = boxes[:, 4] + + wx, wy, ww, wh, wa = self.weights + + dx = deltas[:, 0] / wx + dy = deltas[:, 1] / wy + dw = deltas[:, 2] / ww + dh = deltas[:, 3] / wh + da = deltas[:, 4] / wa + + # Prevent sending too large values into torch.exp() + dw = torch.clamp(dw, max=self.scale_clamp) + dh = torch.clamp(dh, max=self.scale_clamp) + + pred_boxes = torch.zeros_like(deltas) + pred_boxes[:, 0] = dx * widths + ctr_x # x_ctr + pred_boxes[:, 1] = dy * heights + ctr_y # y_ctr + pred_boxes[:, 2] = torch.exp(dw) * widths # width + pred_boxes[:, 3] = torch.exp(dh) * heights # height + + # Following original RRPN implementation, + # angles of deltas are in radians while angles of boxes are in degrees. 
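[Editor's illustration] A quick numeric check of the angle normalization used in the rotated transform above, which wraps differences into `[-180, 180)` before scaling to radians:

```python
for da in (10.0, 190.0, -200.0, 359.0):
    print(da, "->", (da + 180.0) % 360.0 - 180.0)
# 10.0 -> 10.0, 190.0 -> -170.0, -200.0 -> 160.0, 359.0 -> -1.0
```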
+ pred_angle = da * 180.0 / math.pi + angles + pred_angle = (pred_angle + 180.0) % 360.0 - 180.0 # make it in [-180, 180) + + pred_boxes[:, 4] = pred_angle + + return pred_boxes diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/matcher.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/matcher.py new file mode 100644 index 0000000000000000000000000000000000000000..2911f8c1937749dec4dbe64aa3e8491a631e03f2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/matcher.py @@ -0,0 +1,123 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from typing import List +import torch + + +class Matcher(object): + """ + This class assigns to each predicted "element" (e.g., a box) a ground-truth + element. Each predicted element will have exactly zero or one matches; each + ground-truth element may be matched to zero or more predicted elements. + + The matching is determined by the MxN match_quality_matrix, that characterizes + how well each (ground-truth, prediction)-pair match each other. For example, + if the elements are boxes, this matrix may contain box intersection-over-union + overlap values. + + The matcher returns (a) a vector of length N containing the index of the + ground-truth element m in [0, M) that matches to prediction n in [0, N). + (b) a vector of length N containing the labels for each prediction. + """ + + def __init__( + self, thresholds: List[float], labels: List[int], allow_low_quality_matches: bool = False + ): + """ + Args: + thresholds (list): a list of thresholds used to stratify predictions + into levels. + labels (list): a list of values to label predictions belonging at + each level. A label can be one of {-1, 0, 1} signifying + {ignore, negative class, positive class}, respectively. + allow_low_quality_matches (bool): if True, produce additional matches + for predictions with maximum match quality lower than high_threshold. + See set_low_quality_matches_ for more details. + + For example, + thresholds = [0.3, 0.5] + labels = [0, -1, 1] + All predictions with iou < 0.3 will be marked with 0 and + thus will be considered as false positives while training. + All predictions with 0.3 <= iou < 0.5 will be marked with -1 and + thus will be ignored. + All predictions with 0.5 <= iou will be marked with 1 and + thus will be considered as true positives. + """ + # Add -inf and +inf to first and last position in thresholds + thresholds = thresholds[:] + assert thresholds[0] > 0 + thresholds.insert(0, -float("inf")) + thresholds.append(float("inf")) + assert all(low <= high for (low, high) in zip(thresholds[:-1], thresholds[1:])) + assert all(l in [-1, 0, 1] for l in labels) + assert len(labels) == len(thresholds) - 1 + self.thresholds = thresholds + self.labels = labels + self.allow_low_quality_matches = allow_low_quality_matches + + def __call__(self, match_quality_matrix): + """ + Args: + match_quality_matrix (Tensor[float]): an MxN tensor, containing the + pairwise quality between M ground-truth elements and N predicted + elements. All elements must be >= 0 (due to the us of `torch.nonzero` + for selecting indices in :meth:`set_low_quality_matches_`). 
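# Hypothetical usage sketch for Matcher (the IoU numbers are invented): with
# thresholds [0.3, 0.5] and labels [0, -1, 1], a prediction whose best IoU is
# below 0.3 is labeled 0 (background), one in [0.3, 0.5) is labeled -1 (ignored),
# and one at 0.5 or above is labeled 1 (foreground).
import torch

matcher = Matcher(thresholds=[0.3, 0.5], labels=[0, -1, 1])
iou = torch.tensor([[0.90, 0.40, 0.10],    # one row per ground-truth box (M = 2)
                    [0.20, 0.60, 0.05]])   # one column per prediction (N = 3)
matches, match_labels = matcher(iou)
# matches      -> tensor([0, 1, 0])   index of the best ground-truth per prediction
# match_labels -> tensor([1, 1, 0])   foreground / foreground / background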
+ + Returns: + matches (Tensor[int64]): a vector of length N, where matches[i] is a matched + ground-truth index in [0, M) + match_labels (Tensor[int8]): a vector of length N, where pred_labels[i] indicates + whether a prediction is a true or false positive or ignored + """ + assert match_quality_matrix.dim() == 2 + if match_quality_matrix.numel() == 0: + default_matches = match_quality_matrix.new_full( + (match_quality_matrix.size(1),), 0, dtype=torch.int64 + ) + # When no gt boxes exist, we define IOU = 0 and therefore set labels + # to `self.labels[0]`, which usually defaults to background class 0 + # To choose to ignore instead, can make labels=[-1,0,-1,1] + set appropriate thresholds + default_match_labels = match_quality_matrix.new_full( + (match_quality_matrix.size(1),), self.labels[0], dtype=torch.int8 + ) + return default_matches, default_match_labels + + assert torch.all(match_quality_matrix >= 0) + + # match_quality_matrix is M (gt) x N (predicted) + # Max over gt elements (dim 0) to find best gt candidate for each prediction + matched_vals, matches = match_quality_matrix.max(dim=0) + + match_labels = matches.new_full(matches.size(), 1, dtype=torch.int8) + + for (l, low, high) in zip(self.labels, self.thresholds[:-1], self.thresholds[1:]): + low_high = (matched_vals >= low) & (matched_vals < high) + match_labels[low_high] = l + + if self.allow_low_quality_matches: + self.set_low_quality_matches_(match_labels, match_quality_matrix) + + return matches, match_labels + + def set_low_quality_matches_(self, match_labels, match_quality_matrix): + """ + Produce additional matches for predictions that have only low-quality matches. + Specifically, for each ground-truth G find the set of predictions that have + maximum overlap with it (including ties); for each prediction in that set, if + it is unmatched, then match it to the ground-truth G. + + This function implements the RPN assignment case (i) in Sec. 3.1.2 of + :paper:`Faster R-CNN`. + """ + # For each gt, find the prediction with which it has highest quality + highest_quality_foreach_gt, _ = match_quality_matrix.max(dim=1) + # Find the highest quality match available, even if it is low, including ties. + # Note that the matches qualities must be positive due to the use of + # `torch.nonzero`. + _, pred_inds_with_highest_quality = torch.nonzero( + match_quality_matrix == highest_quality_foreach_gt[:, None], as_tuple=True + ) + # If an anchor was labeled positive only due to a low-quality match + # with gt_A, but it has larger overlap with gt_B, it's matched index will still be gt_B. + # This follows the implementation in Detectron, and is found to have no significant impact. + match_labels[pred_inds_with_highest_quality] = 1 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..96ef9b582c2ed38525102ebb589a750cf6b9fa54 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/__init__.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +from .build import META_ARCH_REGISTRY, build_model # isort:skip + +from .panoptic_fpn import PanopticFPN + +# import all the meta_arch, so they will be registered +from .rcnn import GeneralizedRCNN, ProposalNetwork +from .retinanet import RetinaNet +from .semantic_seg import SEM_SEG_HEADS_REGISTRY, SemanticSegmentor, build_sem_seg_head diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/build.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/build.py new file mode 100644 index 0000000000000000000000000000000000000000..630389dfca822f295447abd5e8424186d02e0465 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/build.py @@ -0,0 +1,23 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import torch + +from detectron2.utils.registry import Registry + +META_ARCH_REGISTRY = Registry("META_ARCH") # noqa F401 isort:skip +META_ARCH_REGISTRY.__doc__ = """ +Registry for meta-architectures, i.e. the whole model. + +The registered object will be called with `obj(cfg)` +and expected to return a `nn.Module` object. +""" + + +def build_model(cfg): + """ + Build the whole model architecture, defined by ``cfg.MODEL.META_ARCHITECTURE``. + Note that it does not load any weights from ``cfg``. + """ + meta_arch = cfg.MODEL.META_ARCHITECTURE + model = META_ARCH_REGISTRY.get(meta_arch)(cfg) + model.to(torch.device(cfg.MODEL.DEVICE)) + return model diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/panoptic_fpn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/panoptic_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..c5f92f701f2da3aff6602ad2388307874102fc5c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/panoptic_fpn.py @@ -0,0 +1,218 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import torch +from torch import nn + +from detectron2.structures import ImageList + +from ..backbone import build_backbone +from ..postprocessing import detector_postprocess, sem_seg_postprocess +from ..proposal_generator import build_proposal_generator +from ..roi_heads import build_roi_heads +from .build import META_ARCH_REGISTRY +from .semantic_seg import build_sem_seg_head + +__all__ = ["PanopticFPN"] + + +@META_ARCH_REGISTRY.register() +class PanopticFPN(nn.Module): + """ + Implement the paper :paper:`PanopticFPN`. 
+ """ + + def __init__(self, cfg): + super().__init__() + + self.instance_loss_weight = cfg.MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT + + # options when combining instance & semantic outputs + self.combine_on = cfg.MODEL.PANOPTIC_FPN.COMBINE.ENABLED + self.combine_overlap_threshold = cfg.MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH + self.combine_stuff_area_limit = cfg.MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT + self.combine_instances_confidence_threshold = ( + cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH + ) + + self.backbone = build_backbone(cfg) + self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape()) + self.roi_heads = build_roi_heads(cfg, self.backbone.output_shape()) + self.sem_seg_head = build_sem_seg_head(cfg, self.backbone.output_shape()) + + self.register_buffer("pixel_mean", torch.Tensor(cfg.MODEL.PIXEL_MEAN).view(-1, 1, 1)) + self.register_buffer("pixel_std", torch.Tensor(cfg.MODEL.PIXEL_STD).view(-1, 1, 1)) + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DatasetMapper`. + Each item in the list contains the inputs for one image. + + For now, each item in the list is a dict that contains: + + * "image": Tensor, image in (C, H, W) format. + * "instances": Instances + * "sem_seg": semantic segmentation ground truth. + * Other information that's included in the original dicts, such as: + "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + + Returns: + list[dict]: + each dict is the results for one image. The dict contains the following keys: + + * "instances": see :meth:`GeneralizedRCNN.forward` for its format. + * "sem_seg": see :meth:`SemanticSegmentor.forward` for its format. + * "panoptic_seg": available when `PANOPTIC_FPN.COMBINE.ENABLED`. + See the return value of + :func:`combine_semantic_and_instance_outputs` for its format. 
+ """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + features = self.backbone(images.tensor) + + if "proposals" in batched_inputs[0]: + proposals = [x["proposals"].to(self.device) for x in batched_inputs] + proposal_losses = {} + + if "sem_seg" in batched_inputs[0]: + gt_sem_seg = [x["sem_seg"].to(self.device) for x in batched_inputs] + gt_sem_seg = ImageList.from_tensors( + gt_sem_seg, self.backbone.size_divisibility, self.sem_seg_head.ignore_value + ).tensor + else: + gt_sem_seg = None + sem_seg_results, sem_seg_losses = self.sem_seg_head(features, gt_sem_seg) + + if "instances" in batched_inputs[0]: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + else: + gt_instances = None + if self.proposal_generator: + proposals, proposal_losses = self.proposal_generator(images, features, gt_instances) + detector_results, detector_losses = self.roi_heads( + images, features, proposals, gt_instances + ) + + if self.training: + losses = {} + losses.update(sem_seg_losses) + losses.update({k: v * self.instance_loss_weight for k, v in detector_losses.items()}) + losses.update(proposal_losses) + return losses + + processed_results = [] + for sem_seg_result, detector_result, input_per_image, image_size in zip( + sem_seg_results, detector_results, batched_inputs, images.image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + sem_seg_r = sem_seg_postprocess(sem_seg_result, image_size, height, width) + detector_r = detector_postprocess(detector_result, height, width) + + processed_results.append({"sem_seg": sem_seg_r, "instances": detector_r}) + + if self.combine_on: + panoptic_r = combine_semantic_and_instance_outputs( + detector_r, + sem_seg_r.argmax(dim=0), + self.combine_overlap_threshold, + self.combine_stuff_area_limit, + self.combine_instances_confidence_threshold, + ) + processed_results[-1]["panoptic_seg"] = panoptic_r + return processed_results + + +def combine_semantic_and_instance_outputs( + instance_results, + semantic_results, + overlap_threshold, + stuff_area_limit, + instances_confidence_threshold, +): + """ + Implement a simple combining logic following + "combine_semantic_and_instance_predictions.py" in panopticapi + to produce panoptic segmentation outputs. + + Args: + instance_results: output of :func:`detector_postprocess`. + semantic_results: an (H, W) tensor, each is the contiguous semantic + category id + + Returns: + panoptic_seg (Tensor): of shape (height, width) where the values are ids for each segment. + segments_info (list[dict]): Describe each segment in `panoptic_seg`. + Each dict contains keys "id", "category_id", "isthing". 
+ """ + panoptic_seg = torch.zeros_like(semantic_results, dtype=torch.int32) + + # sort instance outputs by scores + sorted_inds = torch.argsort(-instance_results.scores) + + current_segment_id = 0 + segments_info = [] + + instance_masks = instance_results.pred_masks.to(dtype=torch.bool, device=panoptic_seg.device) + + # Add instances one-by-one, check for overlaps with existing ones + for inst_id in sorted_inds: + score = instance_results.scores[inst_id].item() + if score < instances_confidence_threshold: + break + mask = instance_masks[inst_id] # H,W + mask_area = mask.sum().item() + + if mask_area == 0: + continue + + intersect = (mask > 0) & (panoptic_seg > 0) + intersect_area = intersect.sum().item() + + if intersect_area * 1.0 / mask_area > overlap_threshold: + continue + + if intersect_area > 0: + mask = mask & (panoptic_seg == 0) + + current_segment_id += 1 + panoptic_seg[mask] = current_segment_id + segments_info.append( + { + "id": current_segment_id, + "isthing": True, + "score": score, + "category_id": instance_results.pred_classes[inst_id].item(), + "instance_id": inst_id.item(), + } + ) + + # Add semantic results to remaining empty areas + semantic_labels = torch.unique(semantic_results).cpu().tolist() + for semantic_label in semantic_labels: + if semantic_label == 0: # 0 is a special "thing" class + continue + mask = (semantic_results == semantic_label) & (panoptic_seg == 0) + mask_area = mask.sum().item() + if mask_area < stuff_area_limit: + continue + + current_segment_id += 1 + panoptic_seg[mask] = current_segment_id + segments_info.append( + { + "id": current_segment_id, + "isthing": False, + "category_id": semantic_label, + "area": mask_area, + } + ) + + return panoptic_seg, segments_info diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/rcnn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..b15ea8a38e5ddfbb4049c89917f055295e396b4f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/rcnn.py @@ -0,0 +1,263 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import numpy as np +import torch +from torch import nn + +from detectron2.structures import ImageList +from detectron2.utils.events import get_event_storage +from detectron2.utils.logger import log_first_n + +from ..backbone import build_backbone +from ..postprocessing import detector_postprocess +from ..proposal_generator import build_proposal_generator +from ..roi_heads import build_roi_heads +from .build import META_ARCH_REGISTRY + +__all__ = ["GeneralizedRCNN", "ProposalNetwork"] + + +@META_ARCH_REGISTRY.register() +class GeneralizedRCNN(nn.Module): + """ + Generalized R-CNN. Any models that contains the following three components: + 1. Per-image feature extraction (aka backbone) + 2. Region proposal generation + 3. 
Per-region feature extraction and prediction + """ + + def __init__(self, cfg): + super().__init__() + + self.backbone = build_backbone(cfg) + self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape()) + self.roi_heads = build_roi_heads(cfg, self.backbone.output_shape()) + self.vis_period = cfg.VIS_PERIOD + self.input_format = cfg.INPUT.FORMAT + + assert len(cfg.MODEL.PIXEL_MEAN) == len(cfg.MODEL.PIXEL_STD) + self.register_buffer("pixel_mean", torch.Tensor(cfg.MODEL.PIXEL_MEAN).view(-1, 1, 1)) + self.register_buffer("pixel_std", torch.Tensor(cfg.MODEL.PIXEL_STD).view(-1, 1, 1)) + + @property + def device(self): + return self.pixel_mean.device + + def visualize_training(self, batched_inputs, proposals): + """ + A function used to visualize images and proposals. It shows ground truth + bounding boxes on the original image and up to 20 predicted object + proposals on the original image. Users can implement different + visualization functions for different models. + + Args: + batched_inputs (list): a list that contains input to the model. + proposals (list): a list that contains predicted proposals. Both + batched_inputs and proposals should have the same length. + """ + from detectron2.utils.visualizer import Visualizer + + storage = get_event_storage() + max_vis_prop = 20 + + for input, prop in zip(batched_inputs, proposals): + img = input["image"].cpu().numpy() + assert img.shape[0] == 3, "Images should have 3 channels." + if self.input_format == "BGR": + img = img[::-1, :, :] + img = img.transpose(1, 2, 0) + v_gt = Visualizer(img, None) + v_gt = v_gt.overlay_instances(boxes=input["instances"].gt_boxes) + anno_img = v_gt.get_image() + box_size = min(len(prop.proposal_boxes), max_vis_prop) + v_pred = Visualizer(img, None) + v_pred = v_pred.overlay_instances( + boxes=prop.proposal_boxes[0:box_size].tensor.cpu().numpy() + ) + prop_img = v_pred.get_image() + vis_img = np.concatenate((anno_img, prop_img), axis=1) + vis_img = vis_img.transpose(2, 0, 1) + vis_name = "Left: GT bounding boxes; Right: Predicted proposals" + storage.put_image(vis_name, vis_img) + break # only visualize one image in a batch + + def forward(self, batched_inputs): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DatasetMapper` . + Each item in the list contains the inputs for one image. + For now, each item in the list is a dict that contains: + + * image: Tensor, image in (C, H, W) format. + * instances (optional): groundtruth :class:`Instances` + * proposals (optional): :class:`Instances`, precomputed proposals. + + Other information that's included in the original dicts, such as: + + * "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + + Returns: + list[dict]: + Each dict is the output for one input image. + The dict contains one key "instances" whose value is a :class:`Instances`. 
+ The :class:`Instances` object has the following keys: + "pred_boxes", "pred_classes", "scores", "pred_masks", "pred_keypoints" + """ + if not self.training: + return self.inference(batched_inputs) + + images = self.preprocess_image(batched_inputs) + if "instances" in batched_inputs[0]: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + elif "targets" in batched_inputs[0]: + log_first_n( + logging.WARN, "'targets' in the model inputs is now renamed to 'instances'!", n=10 + ) + gt_instances = [x["targets"].to(self.device) for x in batched_inputs] + else: + gt_instances = None + + features = self.backbone(images.tensor) + + if self.proposal_generator: + proposals, proposal_losses = self.proposal_generator(images, features, gt_instances) + else: + assert "proposals" in batched_inputs[0] + proposals = [x["proposals"].to(self.device) for x in batched_inputs] + proposal_losses = {} + + _, detector_losses = self.roi_heads(images, features, proposals, gt_instances) + if self.vis_period > 0: + storage = get_event_storage() + if storage.iter % self.vis_period == 0: + self.visualize_training(batched_inputs, proposals) + + losses = {} + losses.update(detector_losses) + losses.update(proposal_losses) + return losses + + def inference(self, batched_inputs, detected_instances=None, do_postprocess=True): + """ + Run inference on the given inputs. + + Args: + batched_inputs (list[dict]): same as in :meth:`forward` + detected_instances (None or list[Instances]): if not None, it + contains an `Instances` object per image. The `Instances` + object contains "pred_boxes" and "pred_classes" which are + known boxes in the image. + The inference will then skip the detection of bounding boxes, + and only predict other per-ROI outputs. + do_postprocess (bool): whether to apply post-processing on the outputs. + + Returns: + same as in :meth:`forward`. + """ + assert not self.training + + images = self.preprocess_image(batched_inputs) + features = self.backbone(images.tensor) + + if detected_instances is None: + if self.proposal_generator: + proposals, _ = self.proposal_generator(images, features, None) + else: + assert "proposals" in batched_inputs[0] + proposals = [x["proposals"].to(self.device) for x in batched_inputs] + + results, _ = self.roi_heads(images, features, proposals, None) + else: + detected_instances = [x.to(self.device) for x in detected_instances] + results = self.roi_heads.forward_with_given_boxes(features, detected_instances) + + if do_postprocess: + return GeneralizedRCNN._postprocess(results, batched_inputs, images.image_sizes) + else: + return results + + def preprocess_image(self, batched_inputs): + """ + Normalize, pad and batch the input images. + """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + return images + + @staticmethod + def _postprocess(instances, batched_inputs, image_sizes): + """ + Rescale the output instances to the target size. 
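# A hedged end-to-end usage sketch for the class above; `cfg` is assumed to be a
# detectron2 CfgNode and the import path is the usual public one.
import torch
from detectron2.modeling import build_model

model = build_model(cfg)   # weights still need to be loaded separately
model.eval()
with torch.no_grad():
    outputs = model([{"image": torch.zeros(3, 800, 1216)}])
instances = outputs[0]["instances"]   # pred_boxes, scores, pred_classes, ...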
+ """ + # note: private function; subject to changes + processed_results = [] + for results_per_image, input_per_image, image_size in zip( + instances, batched_inputs, image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + r = detector_postprocess(results_per_image, height, width) + processed_results.append({"instances": r}) + return processed_results + + +@META_ARCH_REGISTRY.register() +class ProposalNetwork(nn.Module): + """ + A meta architecture that only predicts object proposals. + """ + + def __init__(self, cfg): + super().__init__() + self.backbone = build_backbone(cfg) + self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape()) + + self.register_buffer("pixel_mean", torch.Tensor(cfg.MODEL.PIXEL_MEAN).view(-1, 1, 1)) + self.register_buffer("pixel_std", torch.Tensor(cfg.MODEL.PIXEL_STD).view(-1, 1, 1)) + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs): + """ + Args: + Same as in :class:`GeneralizedRCNN.forward` + + Returns: + list[dict]: + Each dict is the output for one input image. + The dict contains one key "proposals" whose value is a + :class:`Instances` with keys "proposal_boxes" and "objectness_logits". + """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + features = self.backbone(images.tensor) + + if "instances" in batched_inputs[0]: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + elif "targets" in batched_inputs[0]: + log_first_n( + logging.WARN, "'targets' in the model inputs is now renamed to 'instances'!", n=10 + ) + gt_instances = [x["targets"].to(self.device) for x in batched_inputs] + else: + gt_instances = None + proposals, proposal_losses = self.proposal_generator(images, features, gt_instances) + # In training, the proposals are not useful at all but we generate them anyway. + # This makes RPN-only models about 5% slower. + if self.training: + return proposal_losses + + processed_results = [] + for results_per_image, input_per_image, image_size in zip( + proposals, batched_inputs, images.image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + r = detector_postprocess(results_per_image, height, width) + processed_results.append({"proposals": r}) + return processed_results diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/retinanet.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/retinanet.py new file mode 100644 index 0000000000000000000000000000000000000000..35c42cc25e93bf2841c5e1fcff389f317ed0883a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/retinanet.py @@ -0,0 +1,489 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import math +import numpy as np +from typing import List +import torch +from fvcore.nn import sigmoid_focal_loss_jit, smooth_l1_loss +from torch import nn + +from detectron2.layers import ShapeSpec, batched_nms, cat +from detectron2.structures import Boxes, ImageList, Instances, pairwise_iou +from detectron2.utils.events import get_event_storage +from detectron2.utils.logger import log_first_n + +from ..anchor_generator import build_anchor_generator +from ..backbone import build_backbone +from ..box_regression import Box2BoxTransform +from ..matcher import Matcher +from ..postprocessing import detector_postprocess +from .build import META_ARCH_REGISTRY + +__all__ = ["RetinaNet"] + + +def permute_to_N_HWA_K(tensor, K): + """ + Transpose/reshape a tensor from (N, (A x K), H, W) to (N, (HxWxA), K) + """ + assert tensor.dim() == 4, tensor.shape + N, _, H, W = tensor.shape + tensor = tensor.view(N, -1, K, H, W) + tensor = tensor.permute(0, 3, 4, 1, 2) + tensor = tensor.reshape(N, -1, K) # Size=(N,HWA,K) + return tensor + + +def permute_all_cls_and_box_to_N_HWA_K_and_concat(box_cls, box_delta, num_classes=80): + """ + Rearrange the tensor layout from the network output, i.e.: + list[Tensor]: #lvl tensors of shape (N, A x K, Hi, Wi) + to per-image predictions, i.e.: + Tensor: of shape (N x sum(Hi x Wi x A), K) + """ + # for each feature level, permute the outputs to make them be in the + # same format as the labels. Note that the labels are computed for + # all feature levels concatenated, so we keep the same representation + # for the objectness and the box_delta + box_cls_flattened = [permute_to_N_HWA_K(x, num_classes) for x in box_cls] + box_delta_flattened = [permute_to_N_HWA_K(x, 4) for x in box_delta] + # concatenate on the first dimension (representing the feature levels), to + # take into account the way the labels were generated (with all feature maps + # being concatenated as well) + box_cls = cat(box_cls_flattened, dim=1).view(-1, num_classes) + box_delta = cat(box_delta_flattened, dim=1).view(-1, 4) + return box_cls, box_delta + + +@META_ARCH_REGISTRY.register() +class RetinaNet(nn.Module): + """ + Implement RetinaNet in :paper:`RetinaNet`. 
+ """ + + def __init__(self, cfg): + super().__init__() + + # fmt: off + self.num_classes = cfg.MODEL.RETINANET.NUM_CLASSES + self.in_features = cfg.MODEL.RETINANET.IN_FEATURES + # Loss parameters: + self.focal_loss_alpha = cfg.MODEL.RETINANET.FOCAL_LOSS_ALPHA + self.focal_loss_gamma = cfg.MODEL.RETINANET.FOCAL_LOSS_GAMMA + self.smooth_l1_loss_beta = cfg.MODEL.RETINANET.SMOOTH_L1_LOSS_BETA + # Inference parameters: + self.score_threshold = cfg.MODEL.RETINANET.SCORE_THRESH_TEST + self.topk_candidates = cfg.MODEL.RETINANET.TOPK_CANDIDATES_TEST + self.nms_threshold = cfg.MODEL.RETINANET.NMS_THRESH_TEST + self.max_detections_per_image = cfg.TEST.DETECTIONS_PER_IMAGE + # Vis parameters + self.vis_period = cfg.VIS_PERIOD + self.input_format = cfg.INPUT.FORMAT + # fmt: on + + self.backbone = build_backbone(cfg) + + backbone_shape = self.backbone.output_shape() + feature_shapes = [backbone_shape[f] for f in self.in_features] + self.head = RetinaNetHead(cfg, feature_shapes) + self.anchor_generator = build_anchor_generator(cfg, feature_shapes) + + # Matching and loss + self.box2box_transform = Box2BoxTransform(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS) + self.matcher = Matcher( + cfg.MODEL.RETINANET.IOU_THRESHOLDS, + cfg.MODEL.RETINANET.IOU_LABELS, + allow_low_quality_matches=True, + ) + + self.register_buffer("pixel_mean", torch.Tensor(cfg.MODEL.PIXEL_MEAN).view(-1, 1, 1)) + self.register_buffer("pixel_std", torch.Tensor(cfg.MODEL.PIXEL_STD).view(-1, 1, 1)) + + """ + In Detectron1, loss is normalized by number of foreground samples in the batch. + When batch size is 1 per GPU, #foreground has a large variance and + using it lead to lower performance. Here we maintain an EMA of #foreground to + stabilize the normalizer. + """ + self.loss_normalizer = 100 # initialize with any reasonable #fg that's not too small + self.loss_normalizer_momentum = 0.9 + + @property + def device(self): + return self.pixel_mean.device + + def visualize_training(self, batched_inputs, results): + """ + A function used to visualize ground truth images and final network predictions. + It shows ground truth bounding boxes on the original image and up to 20 + predicted object bounding boxes on the original image. + + Args: + batched_inputs (list): a list that contains input to the model. + results (List[Instances]): a list of #images elements. + """ + from detectron2.utils.visualizer import Visualizer + + assert len(batched_inputs) == len( + results + ), "Cannot visualize inputs and results of different sizes" + storage = get_event_storage() + max_boxes = 20 + + image_index = 0 # only visualize a single image + img = batched_inputs[image_index]["image"].cpu().numpy() + assert img.shape[0] == 3, "Images should have 3 channels." 
+ if self.input_format == "BGR": + img = img[::-1, :, :] + img = img.transpose(1, 2, 0) + v_gt = Visualizer(img, None) + v_gt = v_gt.overlay_instances(boxes=batched_inputs[image_index]["instances"].gt_boxes) + anno_img = v_gt.get_image() + processed_results = detector_postprocess(results[image_index], img.shape[0], img.shape[1]) + predicted_boxes = processed_results.pred_boxes.tensor.detach().cpu().numpy() + + v_pred = Visualizer(img, None) + v_pred = v_pred.overlay_instances(boxes=predicted_boxes[0:max_boxes]) + prop_img = v_pred.get_image() + vis_img = np.vstack((anno_img, prop_img)) + vis_img = vis_img.transpose(2, 0, 1) + vis_name = f"Top: GT bounding boxes; Bottom: {max_boxes} Highest Scoring Results" + storage.put_image(vis_name, vis_img) + + def forward(self, batched_inputs): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DatasetMapper` . + Each item in the list contains the inputs for one image. + For now, each item in the list is a dict that contains: + + * image: Tensor, image in (C, H, W) format. + * instances: Instances + + Other information that's included in the original dicts, such as: + + * "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + Returns: + dict[str: Tensor]: + mapping from a named loss to a tensor storing the loss. Used during training only. + """ + images = self.preprocess_image(batched_inputs) + if "instances" in batched_inputs[0]: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + elif "targets" in batched_inputs[0]: + log_first_n( + logging.WARN, "'targets' in the model inputs is now renamed to 'instances'!", n=10 + ) + gt_instances = [x["targets"].to(self.device) for x in batched_inputs] + else: + gt_instances = None + + features = self.backbone(images.tensor) + features = [features[f] for f in self.in_features] + box_cls, box_delta = self.head(features) + anchors = self.anchor_generator(features) + + if self.training: + gt_classes, gt_anchors_reg_deltas = self.get_ground_truth(anchors, gt_instances) + losses = self.losses(gt_classes, gt_anchors_reg_deltas, box_cls, box_delta) + + if self.vis_period > 0: + storage = get_event_storage() + if storage.iter % self.vis_period == 0: + results = self.inference(box_cls, box_delta, anchors, images.image_sizes) + self.visualize_training(batched_inputs, results) + + return losses + else: + results = self.inference(box_cls, box_delta, anchors, images.image_sizes) + processed_results = [] + for results_per_image, input_per_image, image_size in zip( + results, batched_inputs, images.image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + r = detector_postprocess(results_per_image, height, width) + processed_results.append({"instances": r}) + return processed_results + + def losses(self, gt_classes, gt_anchors_deltas, pred_class_logits, pred_anchor_deltas): + """ + Args: + For `gt_classes` and `gt_anchors_deltas` parameters, see + :meth:`RetinaNet.get_ground_truth`. + Their shapes are (N, R) and (N, R, 4), respectively, where R is + the total number of anchors across levels, i.e. sum(Hi x Wi x A) + For `pred_class_logits` and `pred_anchor_deltas`, see + :meth:`RetinaNetHead.forward`. + + Returns: + dict[str, Tensor]: + mapping from a named loss to a scalar tensor + storing the loss. Used during training only. 
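# Aside on the running normalizer used in the body below: `loss_normalizer` is an
# exponential moving average of the per-batch foreground count, so the losses are
# divided by a smoothed value rather than a noisy per-batch one. A toy trace with
# made-up foreground counts:
momentum = 0.9
loss_normalizer = 100.0                    # same starting value as in __init__
for num_foreground in (12, 87, 3, 55):     # hypothetical per-batch counts
    loss_normalizer = momentum * loss_normalizer + (1 - momentum) * num_foreground
# loss_normalizer now sits near the recent average count instead of jumping
# batch-to-batch.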
The dict keys are: + "loss_cls" and "loss_box_reg" + """ + pred_class_logits, pred_anchor_deltas = permute_all_cls_and_box_to_N_HWA_K_and_concat( + pred_class_logits, pred_anchor_deltas, self.num_classes + ) # Shapes: (N x R, K) and (N x R, 4), respectively. + + gt_classes = gt_classes.flatten() + gt_anchors_deltas = gt_anchors_deltas.view(-1, 4) + + valid_idxs = gt_classes >= 0 + foreground_idxs = (gt_classes >= 0) & (gt_classes != self.num_classes) + num_foreground = foreground_idxs.sum().item() + get_event_storage().put_scalar("num_foreground", num_foreground) + self.loss_normalizer = ( + self.loss_normalizer_momentum * self.loss_normalizer + + (1 - self.loss_normalizer_momentum) * num_foreground + ) + + gt_classes_target = torch.zeros_like(pred_class_logits) + gt_classes_target[foreground_idxs, gt_classes[foreground_idxs]] = 1 + + # logits loss + loss_cls = sigmoid_focal_loss_jit( + pred_class_logits[valid_idxs], + gt_classes_target[valid_idxs], + alpha=self.focal_loss_alpha, + gamma=self.focal_loss_gamma, + reduction="sum", + ) / max(1, self.loss_normalizer) + + # regression loss + loss_box_reg = smooth_l1_loss( + pred_anchor_deltas[foreground_idxs], + gt_anchors_deltas[foreground_idxs], + beta=self.smooth_l1_loss_beta, + reduction="sum", + ) / max(1, self.loss_normalizer) + + return {"loss_cls": loss_cls, "loss_box_reg": loss_box_reg} + + @torch.no_grad() + def get_ground_truth(self, anchors, targets): + """ + Args: + anchors (list[Boxes]): A list of #feature level Boxes. + The Boxes contains anchors of this image on the specific feature level. + targets (list[Instances]): a list of N `Instances`s. The i-th + `Instances` contains the ground-truth per-instance annotations + for the i-th input image. Specify `targets` during training only. + + Returns: + gt_classes (Tensor): + An integer tensor of shape (N, R) storing ground-truth labels for each anchor. + R is the total number of anchors, i.e. the sum of Hi x Wi x A for all levels. + Anchors with an IoU with some target higher than the foreground threshold + are assigned their corresponding label in the [0, K-1] range. + Anchors whose IoU are below the background threshold are assigned + the label "K". Anchors whose IoU are between the foreground and background + thresholds are assigned a label "-1", i.e. ignore. + gt_anchors_deltas (Tensor): + Shape (N, R, 4). + The last dimension represents ground-truth box2box transform + targets (dx, dy, dw, dh) that map each anchor to its matched ground-truth box. + The values in the tensor are meaningful only when the corresponding + anchor is labeled as foreground. + """ + gt_classes = [] + gt_anchors_deltas = [] + anchors = Boxes.cat(anchors) # Rx4 + + for targets_per_image in targets: + match_quality_matrix = pairwise_iou(targets_per_image.gt_boxes, anchors) + gt_matched_idxs, anchor_labels = self.matcher(match_quality_matrix) + + has_gt = len(targets_per_image) > 0 + if has_gt: + # ground truth box regression + matched_gt_boxes = targets_per_image.gt_boxes[gt_matched_idxs] + gt_anchors_reg_deltas_i = self.box2box_transform.get_deltas( + anchors.tensor, matched_gt_boxes.tensor + ) + + gt_classes_i = targets_per_image.gt_classes[gt_matched_idxs] + # Anchors with label 0 are treated as background. + gt_classes_i[anchor_labels == 0] = self.num_classes + # Anchors with label -1 are ignored. 
+ gt_classes_i[anchor_labels == -1] = -1 + else: + gt_classes_i = torch.zeros_like(gt_matched_idxs) + self.num_classes + gt_anchors_reg_deltas_i = torch.zeros_like(anchors.tensor) + + gt_classes.append(gt_classes_i) + gt_anchors_deltas.append(gt_anchors_reg_deltas_i) + + return torch.stack(gt_classes), torch.stack(gt_anchors_deltas) + + def inference(self, box_cls, box_delta, anchors, image_sizes): + """ + Arguments: + box_cls, box_delta: Same as the output of :meth:`RetinaNetHead.forward` + anchors (list[Boxes]): A list of #feature level Boxes. + The Boxes contain anchors of this image on the specific feature level. + image_sizes (List[torch.Size]): the input image sizes + + Returns: + results (List[Instances]): a list of #images elements. + """ + results = [] + + box_cls = [permute_to_N_HWA_K(x, self.num_classes) for x in box_cls] + box_delta = [permute_to_N_HWA_K(x, 4) for x in box_delta] + # list[Tensor], one per level, each has shape (N, Hi x Wi x A, K or 4) + + for img_idx, image_size in enumerate(image_sizes): + box_cls_per_image = [box_cls_per_level[img_idx] for box_cls_per_level in box_cls] + box_reg_per_image = [box_reg_per_level[img_idx] for box_reg_per_level in box_delta] + results_per_image = self.inference_single_image( + box_cls_per_image, box_reg_per_image, anchors, tuple(image_size) + ) + results.append(results_per_image) + return results + + def inference_single_image(self, box_cls, box_delta, anchors, image_size): + """ + Single-image inference. Return bounding-box detection results by thresholding + on scores and applying non-maximum suppression (NMS). + + Arguments: + box_cls (list[Tensor]): list of #feature levels. Each entry contains + tensor of size (H x W x A, K) + box_delta (list[Tensor]): Same shape as 'box_cls' except that K becomes 4. + anchors (list[Boxes]): list of #feature levels. Each entry contains + a Boxes object, which contains all the anchors for that + image in that feature level. + image_size (tuple(H, W)): a tuple of the image height and width. + + Returns: + Same as `inference`, but for only one image. + """ + boxes_all = [] + scores_all = [] + class_idxs_all = [] + + # Iterate over every feature level + for box_cls_i, box_reg_i, anchors_i in zip(box_cls, box_delta, anchors): + # (HxWxAxK,) + box_cls_i = box_cls_i.flatten().sigmoid_() + + # Keep top k top scoring indices only. 
+ num_topk = min(self.topk_candidates, box_reg_i.size(0)) + # torch.sort is actually faster than .topk (at least on GPUs) + predicted_prob, topk_idxs = box_cls_i.sort(descending=True) + predicted_prob = predicted_prob[:num_topk] + topk_idxs = topk_idxs[:num_topk] + + # filter out the proposals with low confidence score + keep_idxs = predicted_prob > self.score_threshold + predicted_prob = predicted_prob[keep_idxs] + topk_idxs = topk_idxs[keep_idxs] + + anchor_idxs = topk_idxs // self.num_classes + classes_idxs = topk_idxs % self.num_classes + + box_reg_i = box_reg_i[anchor_idxs] + anchors_i = anchors_i[anchor_idxs] + # predict boxes + predicted_boxes = self.box2box_transform.apply_deltas(box_reg_i, anchors_i.tensor) + + boxes_all.append(predicted_boxes) + scores_all.append(predicted_prob) + class_idxs_all.append(classes_idxs) + + boxes_all, scores_all, class_idxs_all = [ + cat(x) for x in [boxes_all, scores_all, class_idxs_all] + ] + keep = batched_nms(boxes_all, scores_all, class_idxs_all, self.nms_threshold) + keep = keep[: self.max_detections_per_image] + + result = Instances(image_size) + result.pred_boxes = Boxes(boxes_all[keep]) + result.scores = scores_all[keep] + result.pred_classes = class_idxs_all[keep] + return result + + def preprocess_image(self, batched_inputs): + """ + Normalize, pad and batch the input images. + """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + return images + + +class RetinaNetHead(nn.Module): + """ + The head used in RetinaNet for object classification and box regression. + It has two subnets for the two tasks, with a common structure but separate parameters. + """ + + def __init__(self, cfg, input_shape: List[ShapeSpec]): + super().__init__() + # fmt: off + in_channels = input_shape[0].channels + num_classes = cfg.MODEL.RETINANET.NUM_CLASSES + num_convs = cfg.MODEL.RETINANET.NUM_CONVS + prior_prob = cfg.MODEL.RETINANET.PRIOR_PROB + num_anchors = build_anchor_generator(cfg, input_shape).num_cell_anchors + # fmt: on + assert ( + len(set(num_anchors)) == 1 + ), "Using different number of anchors between levels is not currently supported!" + num_anchors = num_anchors[0] + + cls_subnet = [] + bbox_subnet = [] + for _ in range(num_convs): + cls_subnet.append( + nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1) + ) + cls_subnet.append(nn.ReLU()) + bbox_subnet.append( + nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1) + ) + bbox_subnet.append(nn.ReLU()) + + self.cls_subnet = nn.Sequential(*cls_subnet) + self.bbox_subnet = nn.Sequential(*bbox_subnet) + self.cls_score = nn.Conv2d( + in_channels, num_anchors * num_classes, kernel_size=3, stride=1, padding=1 + ) + self.bbox_pred = nn.Conv2d(in_channels, num_anchors * 4, kernel_size=3, stride=1, padding=1) + + # Initialization + for modules in [self.cls_subnet, self.bbox_subnet, self.cls_score, self.bbox_pred]: + for layer in modules.modules(): + if isinstance(layer, nn.Conv2d): + torch.nn.init.normal_(layer.weight, mean=0, std=0.01) + torch.nn.init.constant_(layer.bias, 0) + + # Use prior in model initialization to improve stability + bias_value = -(math.log((1 - prior_prob) / prior_prob)) + torch.nn.init.constant_(self.cls_score.bias, bias_value) + + def forward(self, features): + """ + Arguments: + features (list[Tensor]): FPN feature map tensors in high to low resolution. 
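# Aside on the bias initialisation a few lines above: setting the classification
# bias to -log((1 - p) / p) makes every anchor start with sigmoid(score) == p, so
# the many easy negatives do not swamp the loss at the first iterations. A quick
# numeric check (0.01 is just an illustrative prior):
import math

prior_prob = 0.01
bias_value = -math.log((1 - prior_prob) / prior_prob)
assert abs(1.0 / (1.0 + math.exp(-bias_value)) - prior_prob) < 1e-12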
+ Each tensor in the list correspond to different feature levels. + + Returns: + logits (list[Tensor]): #lvl tensors, each has shape (N, AxK, Hi, Wi). + The tensor predicts the classification probability + at each spatial position for each of the A anchors and K object + classes. + bbox_reg (list[Tensor]): #lvl tensors, each has shape (N, Ax4, Hi, Wi). + The tensor predicts 4-vector (dx,dy,dw,dh) box + regression values for every anchor. These values are the + relative offset between the anchor and the ground truth box. + """ + logits = [] + bbox_reg = [] + for feature in features: + logits.append(self.cls_score(self.cls_subnet(feature))) + bbox_reg.append(self.bbox_pred(self.bbox_subnet(feature))) + return logits, bbox_reg diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/semantic_seg.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/semantic_seg.py new file mode 100644 index 0000000000000000000000000000000000000000..2c41a7235cb9c578e2c6de5835854bdff7493616 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/meta_arch/semantic_seg.py @@ -0,0 +1,186 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +from typing import Dict +import fvcore.nn.weight_init as weight_init +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.layers import Conv2d, ShapeSpec +from detectron2.structures import ImageList +from detectron2.utils.registry import Registry + +from ..backbone import build_backbone +from ..postprocessing import sem_seg_postprocess +from .build import META_ARCH_REGISTRY + +__all__ = ["SemanticSegmentor", "SEM_SEG_HEADS_REGISTRY", "SemSegFPNHead", "build_sem_seg_head"] + + +SEM_SEG_HEADS_REGISTRY = Registry("SEM_SEG_HEADS") +SEM_SEG_HEADS_REGISTRY.__doc__ = """ +Registry for semantic segmentation heads, which make semantic segmentation predictions +from feature maps. +""" + + +@META_ARCH_REGISTRY.register() +class SemanticSegmentor(nn.Module): + """ + Main class for semantic segmentation architectures. + """ + + def __init__(self, cfg): + super().__init__() + self.backbone = build_backbone(cfg) + self.sem_seg_head = build_sem_seg_head(cfg, self.backbone.output_shape()) + self.register_buffer("pixel_mean", torch.Tensor(cfg.MODEL.PIXEL_MEAN).view(-1, 1, 1)) + self.register_buffer("pixel_std", torch.Tensor(cfg.MODEL.PIXEL_STD).view(-1, 1, 1)) + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DatasetMapper`. + Each item in the list contains the inputs for one image. + + For now, each item in the list is a dict that contains: + + * "image": Tensor, image in (C, H, W) format. + * "sem_seg": semantic segmentation ground truth + * Other information that's included in the original dicts, such as: + "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + + Returns: + list[dict]: + Each dict is the output for one input image. + The dict contains one key "sem_seg" whose value is a + Tensor that represents the + per-pixel segmentation prediced by the head. + The prediction has shape KxHxW that represents the logits of + each class for each pixel. 
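# Sketch of consuming the "sem_seg" output described above; `outputs` is assumed
# to come from a SemanticSegmentor in eval mode.
logits = outputs[0]["sem_seg"]        # (K, H, W) per-class logits
label_map = logits.argmax(dim=0)      # (H, W) predicted class id per pixel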
+ """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + + features = self.backbone(images.tensor) + + if "sem_seg" in batched_inputs[0]: + targets = [x["sem_seg"].to(self.device) for x in batched_inputs] + targets = ImageList.from_tensors( + targets, self.backbone.size_divisibility, self.sem_seg_head.ignore_value + ).tensor + else: + targets = None + results, losses = self.sem_seg_head(features, targets) + + if self.training: + return losses + + processed_results = [] + for result, input_per_image, image_size in zip(results, batched_inputs, images.image_sizes): + height = input_per_image.get("height") + width = input_per_image.get("width") + r = sem_seg_postprocess(result, image_size, height, width) + processed_results.append({"sem_seg": r}) + return processed_results + + +def build_sem_seg_head(cfg, input_shape): + """ + Build a semantic segmentation head from `cfg.MODEL.SEM_SEG_HEAD.NAME`. + """ + name = cfg.MODEL.SEM_SEG_HEAD.NAME + return SEM_SEG_HEADS_REGISTRY.get(name)(cfg, input_shape) + + +@SEM_SEG_HEADS_REGISTRY.register() +class SemSegFPNHead(nn.Module): + """ + A semantic segmentation head described in :paper:`PanopticFPN`. + It takes FPN features as input and merges information from all + levels of the FPN into single output. + """ + + def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]): + super().__init__() + + # fmt: off + self.in_features = cfg.MODEL.SEM_SEG_HEAD.IN_FEATURES + feature_strides = {k: v.stride for k, v in input_shape.items()} + feature_channels = {k: v.channels for k, v in input_shape.items()} + self.ignore_value = cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE + num_classes = cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES + conv_dims = cfg.MODEL.SEM_SEG_HEAD.CONVS_DIM + self.common_stride = cfg.MODEL.SEM_SEG_HEAD.COMMON_STRIDE + norm = cfg.MODEL.SEM_SEG_HEAD.NORM + self.loss_weight = cfg.MODEL.SEM_SEG_HEAD.LOSS_WEIGHT + # fmt: on + + self.scale_heads = [] + for in_feature in self.in_features: + head_ops = [] + head_length = max( + 1, int(np.log2(feature_strides[in_feature]) - np.log2(self.common_stride)) + ) + for k in range(head_length): + norm_module = nn.GroupNorm(32, conv_dims) if norm == "GN" else None + conv = Conv2d( + feature_channels[in_feature] if k == 0 else conv_dims, + conv_dims, + kernel_size=3, + stride=1, + padding=1, + bias=not norm, + norm=norm_module, + activation=F.relu, + ) + weight_init.c2_msra_fill(conv) + head_ops.append(conv) + if feature_strides[in_feature] != self.common_stride: + head_ops.append( + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False) + ) + self.scale_heads.append(nn.Sequential(*head_ops)) + self.add_module(in_feature, self.scale_heads[-1]) + self.predictor = Conv2d(conv_dims, num_classes, kernel_size=1, stride=1, padding=0) + weight_init.c2_msra_fill(self.predictor) + + def forward(self, features, targets=None): + """ + Returns: + In training, returns (None, dict of losses) + In inference, returns (CxHxW logits, {}) + """ + x = self.layers(features) + if self.training: + return None, self.losses(x, targets) + else: + x = F.interpolate( + x, scale_factor=self.common_stride, mode="bilinear", align_corners=False + ) + return x, {} + + def layers(self, features): + for i, f in enumerate(self.in_features): + if i == 0: + x = self.scale_heads[i](features[f]) + else: + x = x + self.scale_heads[i](features[f]) + x = self.predictor(x) + return x + + def losses(self, 
predictions, targets): + predictions = F.interpolate( + predictions, scale_factor=self.common_stride, mode="bilinear", align_corners=False + ) + loss = F.cross_entropy( + predictions, targets, reduction="mean", ignore_index=self.ignore_value + ) + losses = {"loss_sem_seg": loss * self.loss_weight} + return losses diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/poolers.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/poolers.py new file mode 100644 index 0000000000000000000000000000000000000000..678f5afc5680e6bdc9931f0449e2ab334a3a5369 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/poolers.py @@ -0,0 +1,231 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +import math +import sys +import torch +from torch import nn +from torchvision.ops import RoIPool + +from detectron2.layers import ROIAlign, ROIAlignRotated, cat + +__all__ = ["ROIPooler"] + + +def assign_boxes_to_levels(box_lists, min_level, max_level, canonical_box_size, canonical_level): + """ + Map each box in `box_lists` to a feature map level index and return the assignment + vector. + + Args: + box_lists (list[Boxes] | list[RotatedBoxes]): A list of N Boxes or N RotatedBoxes, + where N is the number of images in the batch. + min_level (int): Smallest feature map level index. The input is considered index 0, + the output of stage 1 is index 1, and so. + max_level (int): Largest feature map level index. + canonical_box_size (int): A canonical box size in pixels (sqrt(box area)). + canonical_level (int): The feature map level index on which a canonically-sized box + should be placed. + + Returns: + A tensor of length M, where M is the total number of boxes aggregated over all + N batch images. The memory layout corresponds to the concatenation of boxes + from all images. Each element is the feature map index, as an offset from + `self.min_level`, for the corresponding box (so value i means the box is at + `self.min_level + i`). + """ + eps = sys.float_info.epsilon + box_sizes = torch.sqrt(cat([boxes.area() for boxes in box_lists])) + # Eqn.(1) in FPN paper + level_assignments = torch.floor( + canonical_level + torch.log2(box_sizes / canonical_box_size + eps) + ) + # clamp level to (min, max), in case the box size is too large or too small + # for the available feature maps + level_assignments = torch.clamp(level_assignments, min=min_level, max=max_level) + return level_assignments.to(torch.int64) - min_level + + +def convert_boxes_to_pooler_format(box_lists): + """ + Convert all boxes in `box_lists` to the low-level format used by ROI pooling ops + (see description under Returns). + + Args: + box_lists (list[Boxes] | list[RotatedBoxes]): + A list of N Boxes or N RotatedBoxes, where N is the number of images in the batch. + + Returns: + When input is list[Boxes]: + A tensor of shape (M, 5), where M is the total number of boxes aggregated over all + N batch images. + The 5 columns are (batch index, x0, y0, x1, y1), where batch index + is the index in [0, N) identifying which batch image the box with corners at + (x0, y0, x1, y1) comes from. + When input is list[RotatedBoxes]: + A tensor of shape (M, 6), where M is the total number of boxes aggregated over all + N batch images. 
+ The 6 columns are (batch index, x_ctr, y_ctr, width, height, angle_degrees), + where batch index is the index in [0, N) identifying which batch image the + rotated box (x_ctr, y_ctr, width, height, angle_degrees) comes from. + """ + + def fmt_box_list(box_tensor, batch_index): + repeated_index = torch.full( + (len(box_tensor), 1), batch_index, dtype=box_tensor.dtype, device=box_tensor.device + ) + return cat((repeated_index, box_tensor), dim=1) + + pooler_fmt_boxes = cat( + [fmt_box_list(box_list.tensor, i) for i, box_list in enumerate(box_lists)], dim=0 + ) + + return pooler_fmt_boxes + + +class ROIPooler(nn.Module): + """ + Region of interest feature map pooler that supports pooling from one or more + feature maps. + """ + + def __init__( + self, + output_size, + scales, + sampling_ratio, + pooler_type, + canonical_box_size=224, + canonical_level=4, + ): + """ + Args: + output_size (int, tuple[int] or list[int]): output size of the pooled region, + e.g., 14 x 14. If tuple or list is given, the length must be 2. + scales (list[float]): The scale for each low-level pooling op relative to + the input image. For a feature map with stride s relative to the input + image, scale is defined as a 1 / s. The stride must be power of 2. + When there are multiple scales, they must form a pyramid, i.e. they must be + a monotically decreasing geometric sequence with a factor of 1/2. + sampling_ratio (int): The `sampling_ratio` parameter for the ROIAlign op. + pooler_type (string): Name of the type of pooling operation that should be applied. + For instance, "ROIPool" or "ROIAlignV2". + canonical_box_size (int): A canonical box size in pixels (sqrt(box area)). The default + is heuristically defined as 224 pixels in the FPN paper (based on ImageNet + pre-training). + canonical_level (int): The feature map level index from which a canonically-sized box + should be placed. The default is defined as level 4 (stride=16) in the FPN paper, + i.e., a box of size 224x224 will be placed on the feature with stride=16. + The box placement for all boxes will be determined from their sizes w.r.t + canonical_box_size. For example, a box whose area is 4x that of a canonical box + should be used to pool features from feature level ``canonical_level+1``. + + Note that the actual input feature maps given to this module may not have + sufficiently many levels for the input boxes. If the boxes are too large or too + small for the input feature maps, the closest level will be used. 
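# Numeric sketch of the level-assignment rule from assign_boxes_to_levels above,
# using the defaults just described (a canonical 224-pixel box lands on level 4);
# the 2..5 level range is an illustrative p2-p5 pyramid, and the tiny eps term is
# dropped for clarity.
import math

def _fpn_level(box_size, canonical_box_size=224, canonical_level=4,
               min_level=2, max_level=5):
    level = math.floor(canonical_level + math.log2(box_size / canonical_box_size))
    return min(max(level, min_level), max_level)

assert _fpn_level(224) == 4   # the canonical box stays on the canonical level
assert _fpn_level(448) == 5   # 4x the area (2x the side) moves one level up
assert _fpn_level(112) == 3   # 1/4 the area moves one level down
assert _fpn_level(10) == 2    # very small boxes clamp to the finest level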
+ """ + super().__init__() + + if isinstance(output_size, int): + output_size = (output_size, output_size) + assert len(output_size) == 2 + assert isinstance(output_size[0], int) and isinstance(output_size[1], int) + self.output_size = output_size + + if pooler_type == "ROIAlign": + self.level_poolers = nn.ModuleList( + ROIAlign( + output_size, spatial_scale=scale, sampling_ratio=sampling_ratio, aligned=False + ) + for scale in scales + ) + elif pooler_type == "ROIAlignV2": + self.level_poolers = nn.ModuleList( + ROIAlign( + output_size, spatial_scale=scale, sampling_ratio=sampling_ratio, aligned=True + ) + for scale in scales + ) + elif pooler_type == "ROIPool": + self.level_poolers = nn.ModuleList( + RoIPool(output_size, spatial_scale=scale) for scale in scales + ) + elif pooler_type == "ROIAlignRotated": + self.level_poolers = nn.ModuleList( + ROIAlignRotated(output_size, spatial_scale=scale, sampling_ratio=sampling_ratio) + for scale in scales + ) + else: + raise ValueError("Unknown pooler type: {}".format(pooler_type)) + + # Map scale (defined as 1 / stride) to its feature map level under the + # assumption that stride is a power of 2. + min_level = -(math.log2(scales[0])) + max_level = -(math.log2(scales[-1])) + assert math.isclose(min_level, int(min_level)) and math.isclose( + max_level, int(max_level) + ), "Featuremap stride is not power of 2!" + self.min_level = int(min_level) + self.max_level = int(max_level) + assert ( + len(scales) == self.max_level - self.min_level + 1 + ), "[ROIPooler] Sizes of input featuremaps do not form a pyramid!" + assert 0 < self.min_level and self.min_level <= self.max_level + self.canonical_level = canonical_level + assert canonical_box_size > 0 + self.canonical_box_size = canonical_box_size + + def forward(self, x, box_lists): + """ + Args: + x (list[Tensor]): A list of feature maps of NCHW shape, with scales matching those + used to construct this module. + box_lists (list[Boxes] | list[RotatedBoxes]): + A list of N Boxes or N RotatedBoxes, where N is the number of images in the batch. + The box coordinates are defined on the original image and + will be scaled by the `scales` argument of :class:`ROIPooler`. + + Returns: + Tensor: + A tensor of shape (M, C, output_size, output_size) where M is the total number of + boxes aggregated over all N batch images and C is the number of channels in `x`. 
+ """ + num_level_assignments = len(self.level_poolers) + + assert isinstance(x, list) and isinstance( + box_lists, list + ), "Arguments to pooler must be lists" + assert ( + len(x) == num_level_assignments + ), "unequal value, num_level_assignments={}, but x is list of {} Tensors".format( + num_level_assignments, len(x) + ) + + assert len(box_lists) == x[0].size( + 0 + ), "unequal value, x[0] batch dim 0 is {}, but box_list has length {}".format( + x[0].size(0), len(box_lists) + ) + + pooler_fmt_boxes = convert_boxes_to_pooler_format(box_lists) + + if num_level_assignments == 1: + return self.level_poolers[0](x[0], pooler_fmt_boxes) + + level_assignments = assign_boxes_to_levels( + box_lists, self.min_level, self.max_level, self.canonical_box_size, self.canonical_level + ) + + num_boxes = len(pooler_fmt_boxes) + num_channels = x[0].shape[1] + output_size = self.output_size[0] + + dtype, device = x[0].dtype, x[0].device + output = torch.zeros( + (num_boxes, num_channels, output_size, output_size), dtype=dtype, device=device + ) + + for level, (x_level, pooler) in enumerate(zip(x, self.level_poolers)): + inds = torch.nonzero(level_assignments == level, as_tuple=True)[0] + pooler_fmt_boxes_level = pooler_fmt_boxes[inds] + output[inds] = pooler(x_level, pooler_fmt_boxes_level) + + return output diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/postprocessing.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/postprocessing.py new file mode 100644 index 0000000000000000000000000000000000000000..e85541ff2e25568cdb9c73702f6c9e68a23f6e4c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/postprocessing.py @@ -0,0 +1,79 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from torch.nn import functional as F + +from detectron2.layers import paste_masks_in_image +from detectron2.structures import Instances +from detectron2.utils.memory import retry_if_cuda_oom + + +def detector_postprocess(results, output_height, output_width, mask_threshold=0.5): + """ + Resize the output instances. + The input images are often resized when entering an object detector. + As a result, we often need the outputs of the detector in a different + resolution from its inputs. + + This function will resize the raw outputs of an R-CNN detector + to produce outputs according to the desired output resolution. + + Args: + results (Instances): the raw outputs from the detector. + `results.image_size` contains the input image resolution the detector sees. + This object might be modified in-place. + output_height, output_width: the desired output resolution. 
+ + Returns: + Instances: the resized output from the model, based on the output resolution + """ + scale_x, scale_y = (output_width / results.image_size[1], output_height / results.image_size[0]) + results = Instances((output_height, output_width), **results.get_fields()) + + if results.has("pred_boxes"): + output_boxes = results.pred_boxes + elif results.has("proposal_boxes"): + output_boxes = results.proposal_boxes + + output_boxes.scale(scale_x, scale_y) + output_boxes.clip(results.image_size) + + results = results[output_boxes.nonempty()] + + if results.has("pred_masks"): + results.pred_masks = retry_if_cuda_oom(paste_masks_in_image)( + results.pred_masks[:, 0, :, :], # N, 1, M, M + results.pred_boxes, + results.image_size, + threshold=mask_threshold, + ) + + if results.has("pred_keypoints"): + results.pred_keypoints[:, :, 0] *= scale_x + results.pred_keypoints[:, :, 1] *= scale_y + + return results + + +def sem_seg_postprocess(result, img_size, output_height, output_width): + """ + Return semantic segmentation predictions in the original resolution. + + The input images are often resized when entering semantic segmentor. Moreover, in same + cases, they also padded inside segmentor to be divisible by maximum network stride. + As a result, we often need the predictions of the segmentor in a different + resolution from its inputs. + + Args: + result (Tensor): semantic segmentation prediction logits. A tensor of shape (C, H, W), + where C is the number of classes, and H, W are the height and width of the prediction. + img_size (tuple): image size that segmentor is taking as input. + output_height, output_width: the desired output resolution. + + Returns: + semantic segmentation prediction (Tensor): A tensor of the shape + (C, output_height, output_width) that contains per-pixel soft predictions. + """ + result = result[:, : img_size[0], : img_size[1]].expand(1, -1, -1, -1) + result = F.interpolate( + result, size=(output_height, output_width), mode="bilinear", align_corners=False + )[0] + return result diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..64fb6d46359c05ed3d7aa1ec91fdd6e15b14c932 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .build import PROPOSAL_GENERATOR_REGISTRY, build_proposal_generator +from .rpn import RPN_HEAD_REGISTRY, build_rpn_head, RPN diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/build.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/build.py new file mode 100644 index 0000000000000000000000000000000000000000..7f252bcb982032cd09270c44741772a34ef32277 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/build.py @@ -0,0 +1,24 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from detectron2.utils.registry import Registry + +PROPOSAL_GENERATOR_REGISTRY = Registry("PROPOSAL_GENERATOR") +PROPOSAL_GENERATOR_REGISTRY.__doc__ = """ +Registry for proposal generator, which produces object proposals from feature maps. + +The registered object will be called with `obj(cfg, input_shape)`. +The call should return a `nn.Module` object. +""" + +from . import rpn, rrpn # noqa F401 isort:skip + + +def build_proposal_generator(cfg, input_shape): + """ + Build a proposal generator from `cfg.MODEL.PROPOSAL_GENERATOR.NAME`. + The name can be "PrecomputedProposals" to use no proposal generator. + """ + name = cfg.MODEL.PROPOSAL_GENERATOR.NAME + if name == "PrecomputedProposals": + return None + + return PROPOSAL_GENERATOR_REGISTRY.get(name)(cfg, input_shape) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/proposal_utils.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/proposal_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d4af90525ba07eb8d313460ee2c3f468fe367cff --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/proposal_utils.py @@ -0,0 +1,57 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import math +import torch + +from detectron2.structures import Instances + + +def add_ground_truth_to_proposals(gt_boxes, proposals): + """ + Call `add_ground_truth_to_proposals_single_image` for all images. + + Args: + gt_boxes(list[Boxes]): list of N elements. Element i is a Boxes + representing the gound-truth for image i. + proposals (list[Instances]): list of N elements. Element i is a Instances + representing the proposals for image i. + + Returns: + list[Instances]: list of N Instances. Each is the proposals for the image, + with field "proposal_boxes" and "objectness_logits". + """ + assert gt_boxes is not None + + assert len(proposals) == len(gt_boxes) + if len(proposals) == 0: + return proposals + + return [ + add_ground_truth_to_proposals_single_image(gt_boxes_i, proposals_i) + for gt_boxes_i, proposals_i in zip(gt_boxes, proposals) + ] + + +def add_ground_truth_to_proposals_single_image(gt_boxes, proposals): + """ + Augment `proposals` with ground-truth boxes from `gt_boxes`. + + Args: + Same as `add_ground_truth_to_proposals`, but with gt_boxes and proposals + per image. + + Returns: + Same as `add_ground_truth_to_proposals`, but for only one image. + """ + device = proposals.objectness_logits.device + # Concatenating gt_boxes with proposals requires them to have the same fields + # Assign all ground-truth boxes an objectness logit corresponding to P(object) \approx 1. 
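+    # logit(p) = log(p / (1 - p)); with p = 1 - 1e-10 the expression below evaluates
+    # to roughly log(1e10) ~= 23.0, so sigmoid(gt_logit_value) ~= 1.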
+ gt_logit_value = math.log((1.0 - 1e-10) / (1 - (1.0 - 1e-10))) + + gt_logits = gt_logit_value * torch.ones(len(gt_boxes), device=device) + gt_proposal = Instances(proposals.image_size) + + gt_proposal.proposal_boxes = gt_boxes + gt_proposal.objectness_logits = gt_logits + new_proposals = Instances.cat([proposals, gt_proposal]) + + return new_proposals diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rpn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rpn.py new file mode 100644 index 0000000000000000000000000000000000000000..8eb93b8e6ecf9f14d5b8de5a7e1d2b1560bcacfd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rpn.py @@ -0,0 +1,285 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from typing import Dict, List +import torch +import torch.nn.functional as F +from torch import nn + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec +from detectron2.structures import Boxes, Instances, pairwise_iou +from detectron2.utils.memory import retry_if_cuda_oom +from detectron2.utils.registry import Registry + +from ..anchor_generator import build_anchor_generator +from ..box_regression import Box2BoxTransform +from ..matcher import Matcher +from ..sampling import subsample_labels +from .build import PROPOSAL_GENERATOR_REGISTRY +from .rpn_outputs import RPNOutputs, find_top_rpn_proposals + +RPN_HEAD_REGISTRY = Registry("RPN_HEAD") +RPN_HEAD_REGISTRY.__doc__ = """ +Registry for RPN heads, which take feature maps and perform +objectness classification and bounding box regression for anchors. + +The registered object will be called with `obj(cfg, input_shape)`. +The call should return a `nn.Module` object. +""" + + +def build_rpn_head(cfg, input_shape): + """ + Build an RPN head defined by `cfg.MODEL.RPN.HEAD_NAME`. + """ + name = cfg.MODEL.RPN.HEAD_NAME + return RPN_HEAD_REGISTRY.get(name)(cfg, input_shape) + + +@RPN_HEAD_REGISTRY.register() +class StandardRPNHead(nn.Module): + """ + Standard RPN classification and regression heads described in :paper:`Faster R-CNN`. + Uses a 3x3 conv to produce a shared hidden state from which one 1x1 conv predicts + objectness logits for each anchor and a second 1x1 conv predicts bounding-box deltas + specifying how to deform each anchor into an object proposal. + """ + + @configurable + def __init__(self, *, in_channels: int, num_anchors: int, box_dim: int = 4): + """ + NOTE: this interface is experimental. + + Args: + in_channels (int): number of input feature channels. When using multiple + input features, they must have the same number of channels. + num_anchors (int): number of anchors to predict for *each spatial position* + on the feature map. The total number of anchors for each + feature map will be `num_anchors * H * W`. + box_dim (int): dimension of a box, which is also the number of box regression + predictions to make for each anchor. An axis aligned box has + box_dim=4, while a rotated box has box_dim=5. 
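+
+            For instance (an illustration, not a requirement of this class): with
+            3 aspect ratios, a single anchor size per location, and box_dim=4,
+            num_anchors is 3, so the head outputs 3 objectness channels and
+            3 * 4 = 12 anchor-delta channels at every spatial position.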
+ """ + super().__init__() + # 3x3 conv for the hidden representation + self.conv = nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1) + # 1x1 conv for predicting objectness logits + self.objectness_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1, stride=1) + # 1x1 conv for predicting box2box transform deltas + self.anchor_deltas = nn.Conv2d(in_channels, num_anchors * box_dim, kernel_size=1, stride=1) + + for l in [self.conv, self.objectness_logits, self.anchor_deltas]: + nn.init.normal_(l.weight, std=0.01) + nn.init.constant_(l.bias, 0) + + @classmethod + def from_config(cls, cfg, input_shape): + # Standard RPN is shared across levels: + in_channels = [s.channels for s in input_shape] + assert len(set(in_channels)) == 1, "Each level must have the same channel!" + in_channels = in_channels[0] + + # RPNHead should take the same input as anchor generator + # NOTE: it assumes that creating an anchor generator does not have unwanted side effect. + anchor_generator = build_anchor_generator(cfg, input_shape) + num_anchors = anchor_generator.num_anchors + box_dim = anchor_generator.box_dim + assert ( + len(set(num_anchors)) == 1 + ), "Each level must have the same number of anchors per spatial position" + return {"in_channels": in_channels, "num_anchors": num_anchors[0], "box_dim": box_dim} + + def forward(self, features): + """ + Args: + features (list[Tensor]): list of feature maps + + Returns: + list[Tensor]: A list of L elements. + Element i is a tensor of shape (N, A, Hi, Wi) representing + the predicted objectness logits for all anchors. A is the number of cell anchors. + list[Tensor]: A list of L elements. Element i is a tensor of shape + (N, A*box_dim, Hi, Wi) representing the predicted "deltas" used to transform anchors + to proposals. + """ + pred_objectness_logits = [] + pred_anchor_deltas = [] + for x in features: + t = F.relu(self.conv(x)) + pred_objectness_logits.append(self.objectness_logits(t)) + pred_anchor_deltas.append(self.anchor_deltas(t)) + return pred_objectness_logits, pred_anchor_deltas + + +@PROPOSAL_GENERATOR_REGISTRY.register() +class RPN(nn.Module): + """ + Region Proposal Network, introduced by :paper:`Faster R-CNN`. 
+ """ + + def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]): + super().__init__() + + # fmt: off + self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE + self.in_features = cfg.MODEL.RPN.IN_FEATURES + self.nms_thresh = cfg.MODEL.RPN.NMS_THRESH + self.batch_size_per_image = cfg.MODEL.RPN.BATCH_SIZE_PER_IMAGE + self.positive_fraction = cfg.MODEL.RPN.POSITIVE_FRACTION + self.smooth_l1_beta = cfg.MODEL.RPN.SMOOTH_L1_BETA + self.loss_weight = cfg.MODEL.RPN.LOSS_WEIGHT + # fmt: on + + # Map from self.training state to train/test settings + self.pre_nms_topk = { + True: cfg.MODEL.RPN.PRE_NMS_TOPK_TRAIN, + False: cfg.MODEL.RPN.PRE_NMS_TOPK_TEST, + } + self.post_nms_topk = { + True: cfg.MODEL.RPN.POST_NMS_TOPK_TRAIN, + False: cfg.MODEL.RPN.POST_NMS_TOPK_TEST, + } + self.boundary_threshold = cfg.MODEL.RPN.BOUNDARY_THRESH + + self.anchor_generator = build_anchor_generator( + cfg, [input_shape[f] for f in self.in_features] + ) + self.box2box_transform = Box2BoxTransform(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS) + self.anchor_matcher = Matcher( + cfg.MODEL.RPN.IOU_THRESHOLDS, cfg.MODEL.RPN.IOU_LABELS, allow_low_quality_matches=True + ) + self.rpn_head = build_rpn_head(cfg, [input_shape[f] for f in self.in_features]) + + def _subsample_labels(self, label): + """ + Randomly sample a subset of positive and negative examples, and overwrite + the label vector to the ignore value (-1) for all elements that are not + included in the sample. + + Args: + labels (Tensor): a vector of -1, 0, 1. Will be modified in-place and returned. + """ + pos_idx, neg_idx = subsample_labels( + label, self.batch_size_per_image, self.positive_fraction, 0 + ) + # Fill with the ignore label (-1), then set positive and negative labels + label.fill_(-1) + label.scatter_(0, pos_idx, 1) + label.scatter_(0, neg_idx, 0) + return label + + @torch.no_grad() + def label_and_sample_anchors(self, anchors: List[Boxes], gt_instances: List[Instances]): + """ + Args: + anchors (list[Boxes]): anchors for each feature map. + gt_instances: the ground-truth instances for each image. + + Returns: + list[Tensor]: + List of #demo tensors. i-th element is a vector of labels whose length is + the total number of anchors across feature maps. Label values are in {-1, 0, 1}, + with meanings: -1 = ignore; 0 = negative class; 1 = positive class. + list[Tensor]: + i-th element is a Nx4 tensor, where N is the total number of anchors across + feature maps. The values are the matched gt boxes for each anchor. + Values are undefined for those anchors not labeled as 1. + """ + anchors = Boxes.cat(anchors) + + gt_boxes = [x.gt_boxes for x in gt_instances] + image_sizes = [x.image_size for x in gt_instances] + del gt_instances + + gt_labels = [] + matched_gt_boxes = [] + for image_size_i, gt_boxes_i in zip(image_sizes, gt_boxes): + """ + image_size_i: (h, w) for the i-th image + gt_boxes_i: ground-truth boxes for i-th image + """ + + match_quality_matrix = retry_if_cuda_oom(pairwise_iou)(gt_boxes_i, anchors) + matched_idxs, gt_labels_i = retry_if_cuda_oom(self.anchor_matcher)(match_quality_matrix) + # Matching is memory-expensive and may result in CPU tensors. 
But the result is small + gt_labels_i = gt_labels_i.to(device=gt_boxes_i.device) + del match_quality_matrix + + if self.boundary_threshold >= 0: + # Discard anchors that go out of the boundaries of the image + # NOTE: This is legacy functionality that is turned off by default in Detectron2 + anchors_inside_image = anchors.inside_box(image_size_i, self.boundary_threshold) + gt_labels_i[~anchors_inside_image] = -1 + + # A vector of labels (-1, 0, 1) for each anchor + gt_labels_i = self._subsample_labels(gt_labels_i) + + if len(gt_boxes_i) == 0: + # These values won't be used anyway since the anchor is labeled as background + matched_gt_boxes_i = torch.zeros_like(anchors.tensor) + else: + # TODO wasted indexing computation for ignored boxes + matched_gt_boxes_i = gt_boxes_i[matched_idxs].tensor + + gt_labels.append(gt_labels_i) # N,AHW + matched_gt_boxes.append(matched_gt_boxes_i) + return gt_labels, matched_gt_boxes + + def forward(self, images, features, gt_instances=None): + """ + Args: + images (ImageList): input images of length `N` + features (dict[str: Tensor]): input data as a mapping from feature + map name to tensor. Axis 0 represents the number of images `N` in + the input data; axes 1-3 are channels, height, and width, which may + vary between feature maps (e.g., if a feature pyramid is used). + gt_instances (list[Instances], optional): a length `N` list of `Instances`s. + Each `Instances` stores ground-truth instances for the corresponding image. + + Returns: + proposals: list[Instances]: contains fields "proposal_boxes", "objectness_logits" + loss: dict[Tensor] or None + """ + features = [features[f] for f in self.in_features] + pred_objectness_logits, pred_anchor_deltas = self.rpn_head(features) + anchors = self.anchor_generator(features) + + if self.training: + gt_labels, gt_boxes = self.label_and_sample_anchors(anchors, gt_instances) + else: + gt_labels, gt_boxes = None, None + + outputs = RPNOutputs( + self.box2box_transform, + self.batch_size_per_image, + images, + pred_objectness_logits, + pred_anchor_deltas, + anchors, + gt_labels, + gt_boxes, + self.smooth_l1_beta, + ) + + if self.training: + losses = {k: v * self.loss_weight for k, v in outputs.losses().items()} + else: + losses = {} + + with torch.no_grad(): + # Find the top proposals by applying NMS and removing boxes that + # are too small. The proposals are treated as fixed for approximate + # joint training with roi heads. This approach ignores the derivative + # w.r.t. the proposal boxes’ coordinates that are also network + # responses, so is approximate. + proposals = find_top_rpn_proposals( + outputs.predict_proposals(), + outputs.predict_objectness_logits(), + images, + self.nms_thresh, + self.pre_nms_topk[self.training], + self.post_nms_topk[self.training], + self.min_box_side_len, + self.training, + ) + + return proposals, losses diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rpn_outputs.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rpn_outputs.py new file mode 100644 index 0000000000000000000000000000000000000000..44f846f18b30d846d1d87faf7f2aa3b10c2333b8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rpn_outputs.py @@ -0,0 +1,323 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import itertools +import logging +import torch +import torch.nn.functional as F +from fvcore.nn import smooth_l1_loss + +from detectron2.layers import batched_nms, cat +from detectron2.structures import Boxes, Instances +from detectron2.utils.events import get_event_storage + +logger = logging.getLogger(__name__) + +# TODO: comments for future refactoring of this module +# +# From @rbg: +# This code involves a significant amount of tensor reshaping and permuting. Look for +# ways to simplify this. + +""" +Shape shorthand in this module: + + N: number of images in the minibatch + L: number of feature maps per image on which RPN is run + A: number of cell anchors (must be the same for all feature maps) + Hi, Wi: height and width of the i-th feature map + 4: size of the box parameterization + +Naming convention: + + objectness: refers to the binary classification of an anchor as object vs. not + object. + + deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box + transform (see :class:`box_regression.Box2BoxTransform`). + + pred_objectness_logits: predicted objectness scores in [-inf, +inf]; use + sigmoid(pred_objectness_logits) to estimate P(object). + + gt_labels: ground-truth binary classification labels for objectness + + pred_anchor_deltas: predicted box2box transform deltas + + gt_anchor_deltas: ground-truth box2box transform deltas +""" + + +def find_top_rpn_proposals( + proposals, + pred_objectness_logits, + images, + nms_thresh, + pre_nms_topk, + post_nms_topk, + min_box_side_len, + training, +): + """ + For each feature map, select the `pre_nms_topk` highest scoring proposals, + apply NMS, clip proposals, and remove small boxes. Return the `post_nms_topk` + highest scoring proposals among all the feature maps if `training` is True, + otherwise, returns the highest `post_nms_topk` scoring proposals for each + feature map. + + Args: + proposals (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A, 4). + All proposal predictions on the feature maps. + pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A). + images (ImageList): Input images as an :class:`ImageList`. + nms_thresh (float): IoU threshold to use for NMS + pre_nms_topk (int): number of top k scoring proposals to keep before applying NMS. + When RPN is run on multiple feature maps (as in FPN) this number is per + feature map. + post_nms_topk (int): number of top k scoring proposals to keep after applying NMS. + When RPN is run on multiple feature maps (as in FPN) this number is total, + over all feature maps. + min_box_side_len (float): minimum proposal box side length in pixels (absolute units + wrt input images). + training (bool): True if proposals are to be used in training, otherwise False. + This arg exists only to support a legacy bug; look for the "NB: Legacy bug ..." + comment. + + Returns: + proposals (list[Instances]): list of N Instances. The i-th Instances + stores post_nms_topk object proposals for image i, sorted by their + objectness score in descending order. + """ + image_sizes = images.image_sizes # in (h, w) order + num_images = len(image_sizes) + device = proposals[0].device + + # 1. 
Select top-k anchor for every level and every image + topk_scores = [] # #lvl Tensor, each of shape N x topk + topk_proposals = [] + level_ids = [] # #lvl Tensor, each of shape (topk,) + batch_idx = torch.arange(num_images, device=device) + for level_id, proposals_i, logits_i in zip( + itertools.count(), proposals, pred_objectness_logits + ): + Hi_Wi_A = logits_i.shape[1] + num_proposals_i = min(pre_nms_topk, Hi_Wi_A) + + # sort is faster than topk (https://github.com/pytorch/pytorch/issues/22812) + # topk_scores_i, topk_idx = logits_i.topk(num_proposals_i, dim=1) + logits_i, idx = logits_i.sort(descending=True, dim=1) + topk_scores_i = logits_i[batch_idx, :num_proposals_i] + topk_idx = idx[batch_idx, :num_proposals_i] + + # each is N x topk + topk_proposals_i = proposals_i[batch_idx[:, None], topk_idx] # N x topk x 4 + + topk_proposals.append(topk_proposals_i) + topk_scores.append(topk_scores_i) + level_ids.append(torch.full((num_proposals_i,), level_id, dtype=torch.int64, device=device)) + + # 2. Concat all levels together + topk_scores = cat(topk_scores, dim=1) + topk_proposals = cat(topk_proposals, dim=1) + level_ids = cat(level_ids, dim=0) + + # 3. For each image, run a per-level NMS, and choose topk results. + results = [] + for n, image_size in enumerate(image_sizes): + boxes = Boxes(topk_proposals[n]) + scores_per_img = topk_scores[n] + lvl = level_ids + + valid_mask = torch.isfinite(boxes.tensor).all(dim=1) & torch.isfinite(scores_per_img) + if not valid_mask.all(): + if training: + raise FloatingPointError( + "Predicted boxes or scores contain Inf/NaN. Training has diverged." + ) + boxes = boxes[valid_mask] + scores_per_img = scores_per_img[valid_mask] + lvl = lvl[valid_mask] + boxes.clip(image_size) + + # filter empty boxes + keep = boxes.nonempty(threshold=min_box_side_len) + if keep.sum().item() != len(boxes): + boxes, scores_per_img, lvl = boxes[keep], scores_per_img[keep], lvl[keep] + + keep = batched_nms(boxes.tensor, scores_per_img, lvl, nms_thresh) + # In Detectron1, there was different behavior during training vs. testing. + # (https://github.com/facebookresearch/Detectron/issues/459) + # During training, topk is over the proposals from *all* images in the training batch. + # During testing, it is over the proposals for each image separately. + # As a result, the training behavior becomes batch-dependent, + # and the configuration "POST_NMS_TOPK_TRAIN" end up relying on the batch size. + # This bug is addressed in Detectron2 to make the behavior independent of batch size. + keep = keep[:post_nms_topk] # keep is already sorted + + res = Instances(image_size) + res.proposal_boxes = boxes[keep] + res.objectness_logits = scores_per_img[keep] + results.append(res) + return results + + +def rpn_losses( + gt_labels, gt_anchor_deltas, pred_objectness_logits, pred_anchor_deltas, smooth_l1_beta +): + """ + Args: + gt_labels (Tensor): shape (N,), each element in {-1, 0, 1} representing + ground-truth objectness labels with: -1 = ignore; 0 = not object; 1 = object. + gt_anchor_deltas (Tensor): shape (N, box_dim), row i represents ground-truth + box2box transform targets (dx, dy, dw, dh) or (dx, dy, dw, dh, da) that map anchor i to + its matched ground-truth box. + pred_objectness_logits (Tensor): shape (N,), each element is a predicted objectness + logit. 
+ pred_anchor_deltas (Tensor): shape (N, box_dim), each row is a predicted box2box + transform (dx, dy, dw, dh) or (dx, dy, dw, dh, da) + smooth_l1_beta (float): The transition point between L1 and L2 loss in + the smooth L1 loss function. When set to 0, the loss becomes L1. When + set to +inf, the loss becomes constant 0. + + Returns: + objectness_loss, localization_loss, both unnormalized (summed over samples). + """ + pos_masks = gt_labels == 1 + localization_loss = smooth_l1_loss( + pred_anchor_deltas[pos_masks], gt_anchor_deltas[pos_masks], smooth_l1_beta, reduction="sum" + ) + + valid_masks = gt_labels >= 0 + objectness_loss = F.binary_cross_entropy_with_logits( + pred_objectness_logits[valid_masks], + gt_labels[valid_masks].to(torch.float32), + reduction="sum", + ) + return objectness_loss, localization_loss + + +class RPNOutputs(object): + def __init__( + self, + box2box_transform, + batch_size_per_image, + images, + pred_objectness_logits, + pred_anchor_deltas, + anchors, + gt_labels=None, + gt_boxes=None, + smooth_l1_beta=0.0, + ): + """ + Args: + box2box_transform (Box2BoxTransform): :class:`Box2BoxTransform` instance for + anchor-proposal transformations. + images (ImageList): :class:`ImageList` instance representing N input images + batch_size_per_image (int): number of proposals to sample when training + pred_objectness_logits (list[Tensor]): A list of L elements. + Element i is a tensor of shape (N, A, Hi, Wi) representing + the predicted objectness logits for anchors. + pred_anchor_deltas (list[Tensor]): A list of L elements. Element i is a tensor of shape + (N, A*4 or 5, Hi, Wi) representing the predicted "deltas" used to transform anchors + to proposals. + anchors (list[Boxes or RotatedBoxes]): A list of Boxes/RotatedBoxes storing the all + the anchors for each feature map. See :meth:`AnchorGenerator.forward`. + gt_labels (list[Tensor]): Available on in training. + See :meth:`RPN.label_and_sample_anchors`. + gt_boxes (list[Boxes or RotatedBoxes]): Available on in training. + See :meth:`RPN.label_and_sample_anchors`. + smooth_l1_beta (float): The transition point between L1 and L2 loss in + the smooth L1 loss function. When set to 0, the loss becomes L1. When + set to +inf, the loss becomes constant 0. + """ + self.box2box_transform = box2box_transform + self.batch_size_per_image = batch_size_per_image + + B = anchors[0].tensor.size(1) # box dimension (4 or 5) + self.pred_objectness_logits = [ + # Reshape: (N, A, Hi, Wi) -> (N, Hi, Wi, A) -> (N, Hi*Wi*A) + score.permute(0, 2, 3, 1).flatten(1) + for score in pred_objectness_logits + ] + + self.pred_anchor_deltas = [ + # Reshape: (N, A*B, Hi, Wi) -> (N, A, B, Hi, Wi) -> (N, Hi, Wi, A, B) + # -> (N, Hi*Wi*A, B) + x.view(x.shape[0], -1, B, x.shape[-2], x.shape[-1]) + .permute(0, 3, 4, 1, 2) + .flatten(1, -2) + for x in pred_anchor_deltas + ] + + self.anchors = anchors + + self.gt_boxes = gt_boxes + self.gt_labels = gt_labels + + self.num_images = len(images) + self.smooth_l1_beta = smooth_l1_beta + + def losses(self): + """ + Return the losses from a set of RPN predictions and their associated ground-truth. + + Returns: + dict[loss name -> loss value]: A dict mapping from loss name to loss value. + Loss names are: `loss_rpn_cls` for objectness classification and + `loss_rpn_loc` for proposal localization. 
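+
+            Both losses are summed over the sampled anchors and divided by the
+            normalizer ``batch_size_per_image * num_images`` (see the code below);
+            e.g. with 256 sampled anchors per image and 2 images the divisor is 512.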
+ """ + gt_labels = torch.stack(self.gt_labels) + anchors = self.anchors[0].cat(self.anchors).tensor # Ax(4 or 5) + gt_anchor_deltas = [self.box2box_transform.get_deltas(anchors, k) for k in self.gt_boxes] + gt_anchor_deltas = torch.stack(gt_anchor_deltas) + + # Log the number of positive/negative anchors per-image that's used in training + num_pos_anchors = (gt_labels == 1).sum().item() + num_neg_anchors = (gt_labels == 0).sum().item() + storage = get_event_storage() + storage.put_scalar("rpn/num_pos_anchors", num_pos_anchors / self.num_images) + storage.put_scalar("rpn/num_neg_anchors", num_neg_anchors / self.num_images) + + objectness_loss, localization_loss = rpn_losses( + gt_labels, + gt_anchor_deltas, + # concat on the Hi*Wi*A dimension + cat(self.pred_objectness_logits, dim=1), + cat(self.pred_anchor_deltas, dim=1), + self.smooth_l1_beta, + ) + normalizer = self.batch_size_per_image * self.num_images + return { + "loss_rpn_cls": objectness_loss / normalizer, + "loss_rpn_loc": localization_loss / normalizer, + } + + def predict_proposals(self): + """ + Transform anchors into proposals by applying the predicted anchor deltas. + + Returns: + proposals (list[Tensor]): A list of L tensors. Tensor i has shape + (N, Hi*Wi*A, B), where B is box dimension (4 or 5). + """ + proposals = [] + # For each feature map + for anchors_i, pred_anchor_deltas_i in zip(self.anchors, self.pred_anchor_deltas): + B = anchors_i.tensor.size(1) + N = self.num_images + pred_anchor_deltas_i = pred_anchor_deltas_i.reshape(-1, B) + # Expand anchors to shape (N*Hi*Wi*A, B) + anchors_i = anchors_i.tensor.unsqueeze(0).expand(N, -1, -1).reshape(-1, B) + proposals_i = self.box2box_transform.apply_deltas(pred_anchor_deltas_i, anchors_i) + # Append feature map proposals with shape (N, Hi*Wi*A, B) + proposals.append(proposals_i.view(N, -1, B)) + return proposals + + def predict_objectness_logits(self): + """ + Return objectness logits in the same format as the proposals returned by + :meth:`predict_proposals`. + + Returns: + pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape + (N, Hi*Wi*A). + """ + return self.pred_objectness_logits diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rrpn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rrpn.py new file mode 100644 index 0000000000000000000000000000000000000000..8c2ac366face34a12af63c9f13e6dbb14f59bf04 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/proposal_generator/rrpn.py @@ -0,0 +1,233 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import itertools +import logging +from typing import Dict, List +import torch + +from detectron2.layers import ShapeSpec, batched_nms_rotated, cat +from detectron2.structures import Instances, RotatedBoxes, pairwise_iou_rotated +from detectron2.utils.memory import retry_if_cuda_oom + +from ..box_regression import Box2BoxTransformRotated +from .build import PROPOSAL_GENERATOR_REGISTRY +from .rpn import RPN +from .rpn_outputs import RPNOutputs + +logger = logging.getLogger(__name__) + + +def find_top_rrpn_proposals( + proposals, + pred_objectness_logits, + images, + nms_thresh, + pre_nms_topk, + post_nms_topk, + min_box_side_len, + training, +): + """ + For each feature map, select the `pre_nms_topk` highest scoring proposals, + apply NMS, clip proposals, and remove small boxes. 
Return the `post_nms_topk` + highest scoring proposals among all the feature maps if `training` is True, + otherwise, returns the highest `post_nms_topk` scoring proposals for each + feature map. + + Args: + proposals (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A, 5). + All proposal predictions on the feature maps. + pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A). + images (ImageList): Input images as an :class:`ImageList`. + nms_thresh (float): IoU threshold to use for NMS + pre_nms_topk (int): number of top k scoring proposals to keep before applying NMS. + When RRPN is run on multiple feature maps (as in FPN) this number is per + feature map. + post_nms_topk (int): number of top k scoring proposals to keep after applying NMS. + When RRPN is run on multiple feature maps (as in FPN) this number is total, + over all feature maps. + min_box_side_len (float): minimum proposal box side length in pixels (absolute units + wrt input images). + training (bool): True if proposals are to be used in training, otherwise False. + This arg exists only to support a legacy bug; look for the "NB: Legacy bug ..." + comment. + + Returns: + proposals (list[Instances]): list of N Instances. The i-th Instances + stores post_nms_topk object proposals for image i. + """ + image_sizes = images.image_sizes # in (h, w) order + num_images = len(image_sizes) + device = proposals[0].device + + # 1. Select top-k anchor for every level and every image + topk_scores = [] # #lvl Tensor, each of shape N x topk + topk_proposals = [] + level_ids = [] # #lvl Tensor, each of shape (topk,) + batch_idx = torch.arange(num_images, device=device) + for level_id, proposals_i, logits_i in zip( + itertools.count(), proposals, pred_objectness_logits + ): + Hi_Wi_A = logits_i.shape[1] + num_proposals_i = min(pre_nms_topk, Hi_Wi_A) + + # sort is faster than topk (https://github.com/pytorch/pytorch/issues/22812) + # topk_scores_i, topk_idx = logits_i.topk(num_proposals_i, dim=1) + logits_i, idx = logits_i.sort(descending=True, dim=1) + topk_scores_i = logits_i[batch_idx, :num_proposals_i] + topk_idx = idx[batch_idx, :num_proposals_i] + + # each is N x topk + topk_proposals_i = proposals_i[batch_idx[:, None], topk_idx] # N x topk x 5 + + topk_proposals.append(topk_proposals_i) + topk_scores.append(topk_scores_i) + level_ids.append(torch.full((num_proposals_i,), level_id, dtype=torch.int64, device=device)) + + # 2. Concat all levels together + topk_scores = cat(topk_scores, dim=1) + topk_proposals = cat(topk_proposals, dim=1) + level_ids = cat(level_ids, dim=0) + + # 3. For each image, run a per-level NMS, and choose topk results. + results = [] + for n, image_size in enumerate(image_sizes): + boxes = RotatedBoxes(topk_proposals[n]) + scores_per_img = topk_scores[n] + valid_mask = torch.isfinite(boxes.tensor).all(dim=1) & torch.isfinite(scores_per_img) + if not valid_mask.all(): + boxes = boxes[valid_mask] + scores_per_img = scores_per_img[valid_mask] + boxes.clip(image_size) + + # filter empty boxes + keep = boxes.nonempty(threshold=min_box_side_len) + lvl = level_ids + if keep.sum().item() != len(boxes): + boxes, scores_per_img, lvl = (boxes[keep], scores_per_img[keep], level_ids[keep]) + + keep = batched_nms_rotated(boxes.tensor, scores_per_img, lvl, nms_thresh) + # In Detectron1, there was different behavior during training vs. testing. 
+ # (https://github.com/facebookresearch/Detectron/issues/459) + # During training, topk is over the proposals from *all* images in the training batch. + # During testing, it is over the proposals for each image separately. + # As a result, the training behavior becomes batch-dependent, + # and the configuration "POST_NMS_TOPK_TRAIN" end up relying on the batch size. + # This bug is addressed in Detectron2 to make the behavior independent of batch size. + keep = keep[:post_nms_topk] + + res = Instances(image_size) + res.proposal_boxes = boxes[keep] + res.objectness_logits = scores_per_img[keep] + results.append(res) + return results + + +@PROPOSAL_GENERATOR_REGISTRY.register() +class RRPN(RPN): + """ + Rotated Region Proposal Network described in :paper:`RRPN`. + """ + + def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]): + super().__init__(cfg, input_shape) + self.box2box_transform = Box2BoxTransformRotated(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS) + if self.boundary_threshold >= 0: + raise NotImplementedError( + "boundary_threshold is a legacy option not implemented for RRPN." + ) + + @torch.no_grad() + def label_and_sample_anchors(self, anchors: List[RotatedBoxes], gt_instances: List[Instances]): + """ + Args: + anchors (list[RotatedBoxes]): anchors for each feature map. + gt_instances: the ground-truth instances for each image. + + Returns: + list[Tensor]: + List of #demo tensors. i-th element is a vector of labels whose length is + the total number of anchors across feature maps. Label values are in {-1, 0, 1}, + with meanings: -1 = ignore; 0 = negative class; 1 = positive class. + list[Tensor]: + i-th element is a Nx5 tensor, where N is the total number of anchors across + feature maps. The values are the matched gt boxes for each anchor. + Values are undefined for those anchors not labeled as 1. + """ + anchors = RotatedBoxes.cat(anchors) + + gt_boxes = [x.gt_boxes for x in gt_instances] + del gt_instances + + gt_labels = [] + matched_gt_boxes = [] + for gt_boxes_i in gt_boxes: + """ + gt_boxes_i: ground-truth boxes for i-th image + """ + match_quality_matrix = retry_if_cuda_oom(pairwise_iou_rotated)(gt_boxes_i, anchors) + matched_idxs, gt_labels_i = retry_if_cuda_oom(self.anchor_matcher)(match_quality_matrix) + # Matching is memory-expensive and may result in CPU tensors. 
But the result is small + gt_labels_i = gt_labels_i.to(device=gt_boxes_i.device) + + # A vector of labels (-1, 0, 1) for each anchor + gt_labels_i = self._subsample_labels(gt_labels_i) + + if len(gt_boxes_i) == 0: + # These values won't be used anyway since the anchor is labeled as background + matched_gt_boxes_i = torch.zeros_like(anchors.tensor) + else: + # TODO wasted indexing computation for ignored boxes + matched_gt_boxes_i = gt_boxes_i[matched_idxs].tensor + + gt_labels.append(gt_labels_i) # N,AHW + matched_gt_boxes.append(matched_gt_boxes_i) + return gt_labels, matched_gt_boxes + + def forward(self, images, features, gt_instances=None): + # same signature as RPN.forward + features = [features[f] for f in self.in_features] + pred_objectness_logits, pred_anchor_deltas = self.rpn_head(features) + anchors = self.anchor_generator(features) + + if self.training: + gt_labels, gt_boxes = self.label_and_sample_anchors(anchors, gt_instances) + else: + gt_labels, gt_boxes = None, None + + outputs = RPNOutputs( + self.box2box_transform, + self.batch_size_per_image, + images, + pred_objectness_logits, + pred_anchor_deltas, + anchors, + gt_labels, + gt_boxes, + self.smooth_l1_beta, + ) + + if self.training: + losses = {k: v * self.loss_weight for k, v in outputs.losses().items()} + else: + losses = {} + + with torch.no_grad(): + # Find the top proposals by applying NMS and removing boxes that + # are too small. The proposals are treated as fixed for approximate + # joint training with roi heads. This approach ignores the derivative + # w.r.t. the proposal boxes’ coordinates that are also network + # responses, so is approximate. + + # Note: this line is the only difference v.s. RPN.forward + proposals = find_top_rrpn_proposals( + outputs.predict_proposals(), + outputs.predict_objectness_logits(), + images, + self.nms_thresh, + self.pre_nms_topk[self.training], + self.post_nms_topk[self.training], + self.min_box_side_len, + self.training, + ) + + return proposals, losses diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a49099aa5cfa58b55c66fe8fa85092eb26d15535 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .box_head import ROI_BOX_HEAD_REGISTRY, build_box_head +from .keypoint_head import ROI_KEYPOINT_HEAD_REGISTRY, build_keypoint_head, BaseKeypointRCNNHead +from .mask_head import ROI_MASK_HEAD_REGISTRY, build_mask_head, BaseMaskRCNNHead +from .roi_heads import ( + ROI_HEADS_REGISTRY, + ROIHeads, + Res5ROIHeads, + StandardROIHeads, + build_roi_heads, + select_foreground_proposals, +) +from .rotated_fast_rcnn import RROIHeads +from .fast_rcnn import FastRCNNOutputLayers + +from . 
import cascade_rcnn # isort:skip diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/box_head.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/box_head.py new file mode 100644 index 0000000000000000000000000000000000000000..de62d47acfd0ac634daf7db228b43f035cc721f3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/box_head.py @@ -0,0 +1,115 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +from typing import List +import fvcore.nn.weight_init as weight_init +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import configurable +from detectron2.layers import Conv2d, Linear, ShapeSpec, get_norm +from detectron2.utils.registry import Registry + +ROI_BOX_HEAD_REGISTRY = Registry("ROI_BOX_HEAD") +ROI_BOX_HEAD_REGISTRY.__doc__ = """ +Registry for box heads, which make box predictions from per-region features. + +The registered object will be called with `obj(cfg, input_shape)`. +""" + + +@ROI_BOX_HEAD_REGISTRY.register() +class FastRCNNConvFCHead(nn.Module): + """ + A head with several 3x3 conv layers (each followed by norm & relu) and then + several fc layers (each followed by relu). + """ + + @configurable + def __init__( + self, input_shape: ShapeSpec, *, conv_dims: List[int], fc_dims: List[int], conv_norm="" + ): + """ + NOTE: this interface is experimental. + + Args: + input_shape (ShapeSpec): shape of the input feature. + conv_dims (list[int]): the output dimensions of the conv layers + fc_dims (list[int]): the output dimensions of the fc layers + conv_norm (str or callable): normalization for the conv layers. + See :func:`detectron2.layers.get_norm` for supported types. 
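+
+            For example, the widely used "two fc" box head corresponds to
+            ``conv_dims=[]`` and ``fc_dims=[1024, 1024]`` (illustrative values;
+            they come from NUM_CONV/CONV_DIM/NUM_FC/FC_DIM in ``from_config``).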
+ """ + super().__init__() + assert len(conv_dims) + len(fc_dims) > 0 + + self._output_size = (input_shape.channels, input_shape.height, input_shape.width) + + self.conv_norm_relus = [] + for k, conv_dim in enumerate(conv_dims): + conv = Conv2d( + self._output_size[0], + conv_dim, + kernel_size=3, + padding=1, + bias=not conv_norm, + norm=get_norm(conv_norm, conv_dim), + activation=F.relu, + ) + self.add_module("conv{}".format(k + 1), conv) + self.conv_norm_relus.append(conv) + self._output_size = (conv_dim, self._output_size[1], self._output_size[2]) + + self.fcs = [] + for k, fc_dim in enumerate(fc_dims): + fc = Linear(np.prod(self._output_size), fc_dim) + self.add_module("fc{}".format(k + 1), fc) + self.fcs.append(fc) + self._output_size = fc_dim + + for layer in self.conv_norm_relus: + weight_init.c2_msra_fill(layer) + for layer in self.fcs: + weight_init.c2_xavier_fill(layer) + + @classmethod + def from_config(cls, cfg, input_shape): + num_conv = cfg.MODEL.ROI_BOX_HEAD.NUM_CONV + conv_dim = cfg.MODEL.ROI_BOX_HEAD.CONV_DIM + num_fc = cfg.MODEL.ROI_BOX_HEAD.NUM_FC + fc_dim = cfg.MODEL.ROI_BOX_HEAD.FC_DIM + return { + "input_shape": input_shape, + "conv_dims": [conv_dim] * num_conv, + "fc_dims": [fc_dim] * num_fc, + "conv_norm": cfg.MODEL.ROI_BOX_HEAD.NORM, + } + + def forward(self, x): + for layer in self.conv_norm_relus: + x = layer(x) + if len(self.fcs): + if x.dim() > 2: + x = torch.flatten(x, start_dim=1) + for layer in self.fcs: + x = F.relu(layer(x)) + return x + + @property + def output_shape(self): + """ + Returns: + ShapeSpec: the output feature shape + """ + o = self._output_size + if isinstance(o, int): + return ShapeSpec(channels=o) + else: + return ShapeSpec(channels=o[0], height=o[1], width=o[2]) + + +def build_box_head(cfg, input_shape): + """ + Build a box head defined by `cfg.MODEL.ROI_BOX_HEAD.NAME`. + """ + name = cfg.MODEL.ROI_BOX_HEAD.NAME + return ROI_BOX_HEAD_REGISTRY.get(name)(cfg, input_shape) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/cascade_rcnn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/cascade_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..b3efdcf70c3b71b935676e103be288484c66f4e2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/cascade_rcnn.py @@ -0,0 +1,298 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from typing import List +import torch +from torch import nn +from torch.autograd.function import Function + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec +from detectron2.structures import Boxes, Instances, pairwise_iou +from detectron2.utils.events import get_event_storage + +from ..box_regression import Box2BoxTransform +from ..matcher import Matcher +from ..poolers import ROIPooler +from .box_head import build_box_head +from .fast_rcnn import FastRCNNOutputLayers, fast_rcnn_inference +from .roi_heads import ROI_HEADS_REGISTRY, StandardROIHeads + + +class _ScaleGradient(Function): + @staticmethod + def forward(ctx, input, scale): + ctx.scale = scale + return input + + @staticmethod + def backward(ctx, grad_output): + return grad_output * ctx.scale, None + + +@ROI_HEADS_REGISTRY.register() +class CascadeROIHeads(StandardROIHeads): + """ + Implement :paper:`Cascade R-CNN`. 
+ """ + + @configurable + def __init__( + self, + *, + box_in_features: List[str], + box_pooler: ROIPooler, + box_heads: List[nn.Module], + box_predictors: List[nn.Module], + proposal_matchers: List[Matcher], + **kwargs, + ): + """ + NOTE: this interface is experimental. + + Args: + box_pooler (ROIPooler): pooler that extracts region features from given boxes + box_heads (list[nn.Module]): box head for each cascade stage + box_predictors (list[nn.Module]): box predictor for each cascade stage + proposal_matchers (list[Matcher]): matcher with different IoU thresholds to + match boxes with ground truth for each stage. The first matcher matches + RPN proposals with ground truth, the other matchers use boxes predicted + by the previous stage as proposals and match them with ground truth. + """ + assert "proposal_matcher" not in kwargs, ( + "CascadeROIHeads takes 'proposal_matchers=' for each stage instead " + "of one 'proposal_matcher='." + ) + # The first matcher matches RPN proposals with ground truth, done in the base class + kwargs["proposal_matcher"] = proposal_matchers[0] + num_stages = self.num_cascade_stages = len(box_heads) + box_heads = nn.ModuleList(box_heads) + box_predictors = nn.ModuleList(box_predictors) + assert len(box_predictors) == num_stages, f"{len(box_predictors)} != {num_stages}!" + assert len(proposal_matchers) == num_stages, f"{len(proposal_matchers)} != {num_stages}!" + super().__init__( + box_in_features=box_in_features, + box_pooler=box_pooler, + box_head=box_heads, + box_predictor=box_predictors, + **kwargs, + ) + self.proposal_matchers = proposal_matchers + + @classmethod + def from_config(cls, cfg, input_shape): + ret = super().from_config(cfg, input_shape) + ret.pop("proposal_matcher") + return ret + + @classmethod + def _init_box_head(cls, cfg, input_shape): + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) + sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE + cascade_bbox_reg_weights = cfg.MODEL.ROI_BOX_CASCADE_HEAD.BBOX_REG_WEIGHTS + cascade_ious = cfg.MODEL.ROI_BOX_CASCADE_HEAD.IOUS + assert len(cascade_bbox_reg_weights) == len(cascade_ious) + assert cfg.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG, \ + "CascadeROIHeads only support class-agnostic regression now!" 
+ assert cascade_ious[0] == cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS[0] + # fmt: on + + in_channels = [input_shape[f].channels for f in in_features] + # Check all channel counts are equal + assert len(set(in_channels)) == 1, in_channels + in_channels = in_channels[0] + + box_pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + pooled_shape = ShapeSpec( + channels=in_channels, width=pooler_resolution, height=pooler_resolution + ) + + box_heads, box_predictors, proposal_matchers = [], [], [] + for match_iou, bbox_reg_weights in zip(cascade_ious, cascade_bbox_reg_weights): + box_head = build_box_head(cfg, pooled_shape) + box_heads.append(box_head) + box_predictors.append( + FastRCNNOutputLayers( + cfg, + box_head.output_shape, + box2box_transform=Box2BoxTransform(weights=bbox_reg_weights), + ) + ) + proposal_matchers.append(Matcher([match_iou], [0, 1], allow_low_quality_matches=False)) + return { + "box_in_features": in_features, + "box_pooler": box_pooler, + "box_heads": box_heads, + "box_predictors": box_predictors, + "proposal_matchers": proposal_matchers, + } + + def forward(self, images, features, proposals, targets=None): + del images + if self.training: + proposals = self.label_and_sample_proposals(proposals, targets) + + if self.training: + # Need targets to box head + losses = self._forward_box(features, proposals, targets) + losses.update(self._forward_mask(features, proposals)) + losses.update(self._forward_keypoint(features, proposals)) + return proposals, losses + else: + pred_instances = self._forward_box(features, proposals) + pred_instances = self.forward_with_given_boxes(features, pred_instances) + return pred_instances, {} + + def _forward_box(self, features, proposals, targets=None): + """ + Args: + features, targets: the same as in + Same as in :meth:`ROIHeads.forward`. + proposals (list[Instances]): the per-image object proposals with + their matching ground truth. + Each has fields "proposal_boxes", and "objectness_logits", + "gt_classes", "gt_boxes". + """ + features = [features[f] for f in self.box_in_features] + head_outputs = [] # (predictor, predictions, proposals) + prev_pred_boxes = None + image_sizes = [x.image_size for x in proposals] + for k in range(self.num_cascade_stages): + if k > 0: + # The output boxes of the previous stage are used to create the input + # proposals of the next stage. + proposals = self._create_proposals_from_boxes(prev_pred_boxes, image_sizes) + if self.training: + proposals = self._match_and_label_boxes(proposals, k, targets) + predictions = self._run_stage(features, proposals, k) + prev_pred_boxes = self.box_predictor[k].predict_boxes(predictions, proposals) + head_outputs.append((self.box_predictor[k], predictions, proposals)) + + if self.training: + losses = {} + storage = get_event_storage() + for stage, (predictor, predictions, proposals) in enumerate(head_outputs): + with storage.name_scope("stage{}".format(stage)): + stage_losses = predictor.losses(predictions, proposals) + losses.update({k + "_stage{}".format(stage): v for k, v in stage_losses.items()}) + return losses + else: + # Each is a list[Tensor] of length #image. 
Each tensor is Ri x (K+1) + scores_per_stage = [h[0].predict_probs(h[1], h[2]) for h in head_outputs] + + # Average the scores across heads + scores = [ + sum(list(scores_per_image)) * (1.0 / self.num_cascade_stages) + for scores_per_image in zip(*scores_per_stage) + ] + # Use the boxes of the last head + predictor, predictions, proposals = head_outputs[-1] + boxes = predictor.predict_boxes(predictions, proposals) + pred_instances, _ = fast_rcnn_inference( + boxes, + scores, + image_sizes, + predictor.test_score_thresh, + predictor.test_nms_thresh, + predictor.test_topk_per_image, + ) + return pred_instances + + @torch.no_grad() + def _match_and_label_boxes(self, proposals, stage, targets): + """ + Match proposals with groundtruth using the matcher at the given stage. + Label the proposals as foreground or background based on the match. + + Args: + proposals (list[Instances]): One Instances for each image, with + the field "proposal_boxes". + stage (int): the current stage + targets (list[Instances]): the ground truth instances + + Returns: + list[Instances]: the same proposals, but with fields "gt_classes" and "gt_boxes" + """ + num_fg_samples, num_bg_samples = [], [] + for proposals_per_image, targets_per_image in zip(proposals, targets): + match_quality_matrix = pairwise_iou( + targets_per_image.gt_boxes, proposals_per_image.proposal_boxes + ) + # proposal_labels are 0 or 1 + matched_idxs, proposal_labels = self.proposal_matchers[stage](match_quality_matrix) + if len(targets_per_image) > 0: + gt_classes = targets_per_image.gt_classes[matched_idxs] + # Label unmatched proposals (0 label from matcher) as background (label=num_classes) + gt_classes[proposal_labels == 0] = self.num_classes + gt_boxes = targets_per_image.gt_boxes[matched_idxs] + else: + gt_classes = torch.zeros_like(matched_idxs) + self.num_classes + gt_boxes = Boxes( + targets_per_image.gt_boxes.tensor.new_zeros((len(proposals_per_image), 4)) + ) + proposals_per_image.gt_classes = gt_classes + proposals_per_image.gt_boxes = gt_boxes + + num_fg_samples.append((proposal_labels == 1).sum().item()) + num_bg_samples.append(proposal_labels.numel() - num_fg_samples[-1]) + + # Log the number of fg/bg samples in each stage + storage = get_event_storage() + storage.put_scalar( + "stage{}/roi_head/num_fg_samples".format(stage), + sum(num_fg_samples) / len(num_fg_samples), + ) + storage.put_scalar( + "stage{}/roi_head/num_bg_samples".format(stage), + sum(num_bg_samples) / len(num_bg_samples), + ) + return proposals + + def _run_stage(self, features, proposals, stage): + """ + Args: + features (list[Tensor]): #lvl input features to ROIHeads + proposals (list[Instances]): #image Instances, with the field "proposal_boxes" + stage (int): the current stage + + Returns: + Same output as `FastRCNNOutputLayers.forward()`. + """ + box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals]) + # The original implementation averages the losses among heads, + # but scale up the parameter gradients of the heads. + # This is equivalent to adding the losses among heads, + # but scale down the gradients on features. 
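+        # _ScaleGradient (defined above) is the identity in the forward pass and
+        # multiplies the incoming gradient by 1 / num_cascade_stages in the backward
+        # pass, which implements the gradient scaling described in the comment above.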
+ box_features = _ScaleGradient.apply(box_features, 1.0 / self.num_cascade_stages) + box_features = self.box_head[stage](box_features) + return self.box_predictor[stage](box_features) + + def _create_proposals_from_boxes(self, boxes, image_sizes): + """ + Args: + boxes (list[Tensor]): per-image predicted boxes, each of shape Ri x 4 + image_sizes (list[tuple]): list of image shapes in (h, w) + + Returns: + list[Instances]: per-image proposals with the given boxes. + """ + # Just like RPN, the proposals should not have gradients + boxes = [Boxes(b.detach()) for b in boxes] + proposals = [] + for boxes_per_image, image_size in zip(boxes, image_sizes): + boxes_per_image.clip(image_size) + if self.training: + # do not filter empty boxes at inference time, + # because the scores from each stage need to be aligned and added later + boxes_per_image = boxes_per_image[boxes_per_image.nonempty()] + prop = Instances(image_size) + prop.proposal_boxes = boxes_per_image + proposals.append(prop) + return proposals diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/fast_rcnn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/fast_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..ca796ace55509efb8a898f580203076bada387f2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/fast_rcnn.py @@ -0,0 +1,510 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import torch +from fvcore.nn import smooth_l1_loss +from torch import nn +from torch.nn import functional as F + +from detectron2.config import configurable +from detectron2.layers import Linear, ShapeSpec, batched_nms, cat +from detectron2.modeling.box_regression import Box2BoxTransform, apply_deltas_broadcast +from detectron2.structures import Boxes, Instances +from detectron2.utils.events import get_event_storage + +__all__ = ["fast_rcnn_inference", "FastRCNNOutputLayers"] + + +logger = logging.getLogger(__name__) + +""" +Shape shorthand in this module: + + N: number of images in the minibatch + R: number of ROIs, combined over all images, in the minibatch + Ri: number of ROIs in image i + K: number of foreground classes. E.g.,there are 80 foreground classes in COCO. + +Naming convention: + + deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box + transform (see :class:`box_regression.Box2BoxTransform`). + + pred_class_logits: predicted class scores in [-inf, +inf]; use + softmax(pred_class_logits) to estimate P(class). + + gt_classes: ground-truth classification labels in [0, K], where [0, K) represent + foreground object classes and K represents the background class. + + pred_proposal_deltas: predicted box2box transform deltas for transforming proposals + to detection box predictions. + + gt_proposal_deltas: ground-truth box2box transform deltas +""" + + +def fast_rcnn_inference(boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image): + """ + Call `fast_rcnn_inference_single_image` for all images. + + Args: + boxes (list[Tensor]): A list of Tensors of predicted class-specific or class-agnostic + boxes for each image. Element i has shape (Ri, K * 4) if doing + class-specific regression, or (Ri, 4) if doing class-agnostic + regression, where Ri is the number of predicted objects for image i. + This is compatible with the output of :meth:`FastRCNNOutputLayers.predict_boxes`. 
+ scores (list[Tensor]): A list of Tensors of predicted class scores for each image. + Element i has shape (Ri, K + 1), where Ri is the number of predicted objects + for image i. Compatible with the output of :meth:`FastRCNNOutputLayers.predict_probs`. + image_shapes (list[tuple]): A list of (width, height) tuples for each image in the batch. + score_thresh (float): Only return detections with a confidence score exceeding this + threshold. + nms_thresh (float): The threshold to use for box non-maximum suppression. Value in [0, 1]. + topk_per_image (int): The number of top scoring detections to return. Set < 0 to return + all detections. + + Returns: + instances: (list[Instances]): A list of N instances, one for each image in the batch, + that stores the topk most confidence detections. + kept_indices: (list[Tensor]): A list of 1D tensor of length of N, each element indicates + the corresponding boxes/scores index in [0, Ri) from the input, for image i. + """ + result_per_image = [ + fast_rcnn_inference_single_image( + boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image + ) + for scores_per_image, boxes_per_image, image_shape in zip(scores, boxes, image_shapes) + ] + return [x[0] for x in result_per_image], [x[1] for x in result_per_image] + + +def fast_rcnn_inference_single_image( + boxes, scores, image_shape, score_thresh, nms_thresh, topk_per_image +): + """ + Single-image inference. Return bounding-box detection results by thresholding + on scores and applying non-maximum suppression (NMS). + + Args: + Same as `fast_rcnn_inference`, but with boxes, scores, and image shapes + per image. + + Returns: + Same as `fast_rcnn_inference`, but for only one image. + """ + valid_mask = torch.isfinite(boxes).all(dim=1) & torch.isfinite(scores).all(dim=1) + if not valid_mask.all(): + boxes = boxes[valid_mask] + scores = scores[valid_mask] + + scores = scores[:, :-1] + num_bbox_reg_classes = boxes.shape[1] // 4 + # Convert to Boxes to use the `clip` function ... + boxes = Boxes(boxes.reshape(-1, 4)) + boxes.clip(image_shape) + boxes = boxes.tensor.view(-1, num_bbox_reg_classes, 4) # R x C x 4 + + # Filter results based on detection scores + filter_mask = scores > score_thresh # R x K + # R' x 2. First column contains indices of the R predictions; + # Second column contains indices of classes. + filter_inds = filter_mask.nonzero() + if num_bbox_reg_classes == 1: + boxes = boxes[filter_inds[:, 0], 0] + else: + boxes = boxes[filter_mask] + scores = scores[filter_mask] + + # Apply per-class NMS + keep = batched_nms(boxes, scores, filter_inds[:, 1], nms_thresh) + if topk_per_image >= 0: + keep = keep[:topk_per_image] + boxes, scores, filter_inds = boxes[keep], scores[keep], filter_inds[keep] + + result = Instances(image_shape) + result.pred_boxes = Boxes(boxes) + result.scores = scores + result.pred_classes = filter_inds[:, 1] + return result, filter_inds[:, 0] + + +class FastRCNNOutputs(object): + """ + A class that stores information about outputs of a Fast R-CNN head. + It provides methods that are used to decode the outputs of a Fast R-CNN head. + """ + + def __init__( + self, + box2box_transform, + pred_class_logits, + pred_proposal_deltas, + proposals, + smooth_l1_beta=0, + ): + """ + Args: + box2box_transform (Box2BoxTransform/Box2BoxTransformRotated): + box2box transform instance for proposal-to-detection transformations. + pred_class_logits (Tensor): A tensor of shape (R, K + 1) storing the predicted class + logits for all R predicted object instances. 
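`fast_rcnn_inference_single_image` above thresholds the per-class scores and then runs class-aware NMS via `batched_nms` (imported from `detectron2.layers` at the top of this file), so boxes only suppress each other within the same class. A minimal, self-contained sketch with toy boxes and scores (the numbers are assumptions, not taken from the source):

```python
import torch
from detectron2.layers import batched_nms

# Three candidate boxes: two heavily overlapping class-0 boxes and one class-1 box.
boxes = torch.tensor([[0., 0., 10., 10.],
                      [1., 1., 10., 10.],
                      [0., 0., 10., 10.]])
scores = torch.tensor([0.9, 0.8, 0.7])
classes = torch.tensor([0, 0, 1])

keep = batched_nms(boxes, scores, classes, 0.5)
print(keep)  # tensor([0, 2]): the weaker class-0 duplicate is suppressed,
             # while the class-1 box survives because NMS is applied per class
```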
+ Each row corresponds to a predicted object instance. + pred_proposal_deltas (Tensor): A tensor of shape (R, K * B) or (R, B) for + class-specific or class-agnostic regression. It stores the predicted deltas that + transform proposals into final box detections. + B is the box dimension (4 or 5). + When B is 4, each row is [dx, dy, dw, dh (, ....)]. + When B is 5, each row is [dx, dy, dw, dh, da (, ....)]. + proposals (list[Instances]): A list of N Instances, where Instances i stores the + proposals for image i, in the field "proposal_boxes". + When training, each Instances must have ground-truth labels + stored in the field "gt_classes" and "gt_boxes". + The total number of all instances must be equal to R. + smooth_l1_beta (float): The transition point between L1 and L2 loss in + the smooth L1 loss function. When set to 0, the loss becomes L1. When + set to +inf, the loss becomes constant 0. + """ + self.box2box_transform = box2box_transform + self.num_preds_per_image = [len(p) for p in proposals] + self.pred_class_logits = pred_class_logits + self.pred_proposal_deltas = pred_proposal_deltas + self.smooth_l1_beta = smooth_l1_beta + self.image_shapes = [x.image_size for x in proposals] + + if len(proposals): + box_type = type(proposals[0].proposal_boxes) + # cat(..., dim=0) concatenates over all images in the batch + self.proposals = box_type.cat([p.proposal_boxes for p in proposals]) + assert ( + not self.proposals.tensor.requires_grad + ), "Proposals should not require gradients!" + + # The following fields should exist only when training. + if proposals[0].has("gt_boxes"): + self.gt_boxes = box_type.cat([p.gt_boxes for p in proposals]) + assert proposals[0].has("gt_classes") + self.gt_classes = cat([p.gt_classes for p in proposals], dim=0) + else: + self.proposals = Boxes(torch.zeros(0, 4, device=self.pred_proposal_deltas.device)) + self._no_instances = len(proposals) == 0 # no instances found + + def _log_accuracy(self): + """ + Log the accuracy metrics to EventStorage. + """ + num_instances = self.gt_classes.numel() + pred_classes = self.pred_class_logits.argmax(dim=1) + bg_class_ind = self.pred_class_logits.shape[1] - 1 + + fg_inds = (self.gt_classes >= 0) & (self.gt_classes < bg_class_ind) + num_fg = fg_inds.nonzero().numel() + fg_gt_classes = self.gt_classes[fg_inds] + fg_pred_classes = pred_classes[fg_inds] + + num_false_negative = (fg_pred_classes == bg_class_ind).nonzero().numel() + num_accurate = (pred_classes == self.gt_classes).nonzero().numel() + fg_num_accurate = (fg_pred_classes == fg_gt_classes).nonzero().numel() + + storage = get_event_storage() + if num_instances > 0: + storage.put_scalar("fast_rcnn/cls_accuracy", num_accurate / num_instances) + if num_fg > 0: + storage.put_scalar("fast_rcnn/fg_cls_accuracy", fg_num_accurate / num_fg) + storage.put_scalar("fast_rcnn/false_negative", num_false_negative / num_fg) + + def softmax_cross_entropy_loss(self): + """ + Compute the softmax cross entropy loss for box classification. + + Returns: + scalar Tensor + """ + if self._no_instances: + return 0.0 * self.pred_class_logits.sum() + else: + self._log_accuracy() + return F.cross_entropy(self.pred_class_logits, self.gt_classes, reduction="mean") + + def smooth_l1_loss(self): + """ + Compute the smooth L1 loss for box regression. 
+ + Returns: + scalar Tensor + """ + if self._no_instances: + return 0.0 * self.pred_proposal_deltas.sum() + gt_proposal_deltas = self.box2box_transform.get_deltas( + self.proposals.tensor, self.gt_boxes.tensor + ) + box_dim = gt_proposal_deltas.size(1) # 4 or 5 + cls_agnostic_bbox_reg = self.pred_proposal_deltas.size(1) == box_dim + device = self.pred_proposal_deltas.device + + bg_class_ind = self.pred_class_logits.shape[1] - 1 + + # Box delta loss is only computed between the prediction for the gt class k + # (if 0 <= k < bg_class_ind) and the target; there is no loss defined on predictions + # for non-gt classes and background. + # Empty fg_inds produces a valid loss of zero as long as the size_average + # arg to smooth_l1_loss is False (otherwise it uses torch.mean internally + # and would produce a nan loss). + fg_inds = torch.nonzero( + (self.gt_classes >= 0) & (self.gt_classes < bg_class_ind), as_tuple=True + )[0] + if cls_agnostic_bbox_reg: + # pred_proposal_deltas only corresponds to foreground class for agnostic + gt_class_cols = torch.arange(box_dim, device=device) + else: + fg_gt_classes = self.gt_classes[fg_inds] + # pred_proposal_deltas for class k are located in columns [b * k : b * k + b], + # where b is the dimension of box representation (4 or 5) + # Note that compared to Detectron1, + # we do not perform bounding box regression for background classes. + gt_class_cols = box_dim * fg_gt_classes[:, None] + torch.arange(box_dim, device=device) + + loss_box_reg = smooth_l1_loss( + self.pred_proposal_deltas[fg_inds[:, None], gt_class_cols], + gt_proposal_deltas[fg_inds], + self.smooth_l1_beta, + reduction="sum", + ) + # The loss is normalized using the total number of regions (R), not the number + # of foreground regions even though the box regression loss is only defined on + # foreground regions. Why? Because doing so gives equal training influence to + # each foreground example. To see how, consider two different minibatches: + # (1) Contains a single foreground region + # (2) Contains 100 foreground regions + # If we normalize by the number of foreground regions, the single example in + # minibatch (1) will be given 100 times as much influence as each foreground + # example in minibatch (2). Normalizing by the total number of regions, R, + # means that the single example in minibatch (1) and each of the 100 examples + # in minibatch (2) are given equal influence. + loss_box_reg = loss_box_reg / self.gt_classes.numel() + return loss_box_reg + + def _predict_boxes(self): + """ + Returns: + Tensor: A Tensors of predicted class-specific or class-agnostic boxes + for all images in a batch. Element i has shape (Ri, K * B) or (Ri, B), where Ri is + the number of predicted objects for image i and B is the box dimension (4 or 5) + """ + return apply_deltas_broadcast( + self.box2box_transform, self.pred_proposal_deltas, self.proposals.tensor + ) + + """ + A subclass is expected to have the following methods because + they are used to query information about the head predictions. + """ + + def losses(self): + """ + Compute the default losses for box head in Fast(er) R-CNN, + with softmax cross entropy loss and smooth L1 loss. + + Returns: + A dict of losses (scalar tensors) containing keys "loss_cls" and "loss_box_reg". 
+ """ + return { + "loss_cls": self.softmax_cross_entropy_loss(), + "loss_box_reg": self.smooth_l1_loss(), + } + + def predict_boxes(self): + """ + Deprecated + """ + return self._predict_boxes().split(self.num_preds_per_image, dim=0) + + def predict_probs(self): + """ + Deprecated + """ + probs = F.softmax(self.pred_class_logits, dim=-1) + return probs.split(self.num_preds_per_image, dim=0) + + def inference(self, score_thresh, nms_thresh, topk_per_image): + """ + Deprecated + """ + boxes = self.predict_boxes() + scores = self.predict_probs() + image_shapes = self.image_shapes + return fast_rcnn_inference( + boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image + ) + + +class FastRCNNOutputLayers(nn.Module): + """ + Two linear layers for predicting Fast R-CNN outputs: + (1) proposal-to-detection box regression deltas + (2) classification scores + """ + + @configurable + def __init__( + self, + input_shape, + *, + box2box_transform, + num_classes, + cls_agnostic_bbox_reg=False, + smooth_l1_beta=0.0, + test_score_thresh=0.0, + test_nms_thresh=0.5, + test_topk_per_image=100, + ): + """ + NOTE: this interface is experimental. + + Args: + input_shape (ShapeSpec): shape of the input feature to this module + box2box_transform (Box2BoxTransform or Box2BoxTransformRotated): + num_classes (int): number of foreground classes + cls_agnostic_bbox_reg (bool): whether to use class agnostic for bbox regression + smooth_l1_beta (float): transition point from L1 to L2 loss. + test_score_thresh (float): threshold to filter predictions results. + test_nms_thresh (float): NMS threshold for prediction results. + test_topk_per_image (int): number of top predictions to produce per image. + """ + super().__init__() + if isinstance(input_shape, int): # some backward compatibility + input_shape = ShapeSpec(channels=input_shape) + input_size = input_shape.channels * (input_shape.width or 1) * (input_shape.height or 1) + # The prediction layer for num_classes foreground classes and one background class + # (hence + 1) + self.cls_score = Linear(input_size, num_classes + 1) + num_bbox_reg_classes = 1 if cls_agnostic_bbox_reg else num_classes + box_dim = len(box2box_transform.weights) + self.bbox_pred = Linear(input_size, num_bbox_reg_classes * box_dim) + + nn.init.normal_(self.cls_score.weight, std=0.01) + nn.init.normal_(self.bbox_pred.weight, std=0.001) + for l in [self.cls_score, self.bbox_pred]: + nn.init.constant_(l.bias, 0) + + self.box2box_transform = box2box_transform + self.smooth_l1_beta = smooth_l1_beta + self.test_score_thresh = test_score_thresh + self.test_nms_thresh = test_nms_thresh + self.test_topk_per_image = test_topk_per_image + + @classmethod + def from_config(cls, cfg, input_shape): + return { + "input_shape": input_shape, + "box2box_transform": Box2BoxTransform(weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS), + # fmt: off + "num_classes" : cfg.MODEL.ROI_HEADS.NUM_CLASSES, + "cls_agnostic_bbox_reg" : cfg.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG, + "smooth_l1_beta" : cfg.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA, + "test_score_thresh" : cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST, + "test_nms_thresh" : cfg.MODEL.ROI_HEADS.NMS_THRESH_TEST, + "test_topk_per_image" : cfg.TEST.DETECTIONS_PER_IMAGE + # fmt: on + } + + def forward(self, x): + """ + Returns: + Tensor: Nx(K+1) scores for each box + Tensor: Nx4 or Nx(Kx4) bounding box regression deltas. 
+ """ + if x.dim() > 2: + x = torch.flatten(x, start_dim=1) + scores = self.cls_score(x) + proposal_deltas = self.bbox_pred(x) + return scores, proposal_deltas + + # TODO: move the implementation to this class. + def losses(self, predictions, proposals): + """ + Args: + predictions: return values of :meth:`forward()`. + proposals (list[Instances]): proposals that match the features + that were used to compute predictions. + """ + scores, proposal_deltas = predictions + return FastRCNNOutputs( + self.box2box_transform, scores, proposal_deltas, proposals, self.smooth_l1_beta + ).losses() + + def inference(self, predictions, proposals): + """ + Returns: + list[Instances]: same as `fast_rcnn_inference`. + list[Tensor]: same as `fast_rcnn_inference`. + """ + boxes = self.predict_boxes(predictions, proposals) + scores = self.predict_probs(predictions, proposals) + image_shapes = [x.image_size for x in proposals] + return fast_rcnn_inference( + boxes, + scores, + image_shapes, + self.test_score_thresh, + self.test_nms_thresh, + self.test_topk_per_image, + ) + + def predict_boxes_for_gt_classes(self, predictions, proposals): + """ + Returns: + list[Tensor]: A list of Tensors of predicted boxes for GT classes in case of + class-specific box head. Element i of the list has shape (Ri, B), where Ri is + the number of predicted objects for image i and B is the box dimension (4 or 5) + """ + if not len(proposals): + return [] + scores, proposal_deltas = predictions + proposal_boxes = [p.proposal_boxes for p in proposals] + proposal_boxes = proposal_boxes[0].cat(proposal_boxes).tensor + N, B = proposal_boxes.shape + predict_boxes = apply_deltas_broadcast( + self.box2box_transform, proposal_deltas, proposal_boxes + ) # Nx(KxB) + + K = predict_boxes.shape[1] // B + if K > 1: + gt_classes = torch.cat([p.gt_classes for p in proposals], dim=0) + # Some proposals are ignored or have a background class. Their gt_classes + # cannot be used as index. + gt_classes = gt_classes.clamp_(0, K - 1) + + predict_boxes = predict_boxes.view(N, K, B)[ + torch.arange(N, dtype=torch.long, device=predict_boxes.device), gt_classes + ] + num_prop_per_image = [len(p) for p in proposals] + return predict_boxes.split(num_prop_per_image) + + def predict_boxes(self, predictions, proposals): + """ + Returns: + list[Tensor]: A list of Tensors of predicted class-specific or class-agnostic boxes + for each image. Element i has shape (Ri, K * B) or (Ri, B), where Ri is + the number of predicted objects for image i and B is the box dimension (4 or 5) + """ + if not len(proposals): + return [] + _, proposal_deltas = predictions + num_prop_per_image = [len(p) for p in proposals] + proposal_boxes = [p.proposal_boxes for p in proposals] + proposal_boxes = proposal_boxes[0].cat(proposal_boxes).tensor + predict_boxes = apply_deltas_broadcast( + self.box2box_transform, proposal_deltas, proposal_boxes + ) # Nx(KxB) + return predict_boxes.split(num_prop_per_image) + + def predict_probs(self, predictions, proposals): + """ + Returns: + list[Tensor]: A list of Tensors of predicted class probabilities for each image. + Element i has shape (Ri, K + 1), where Ri is the number of predicted objects + for image i. 
+ """ + scores, _ = predictions + num_inst_per_image = [len(p) for p in proposals] + probs = F.softmax(scores, dim=-1) + return probs.split(num_inst_per_image, dim=0) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/keypoint_head.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/keypoint_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c7990c8fd90c70c98d6b2e3f94935f571b957a79 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/keypoint_head.py @@ -0,0 +1,253 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from typing import List +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import configurable +from detectron2.layers import Conv2d, ConvTranspose2d, cat, interpolate +from detectron2.structures import Instances, heatmaps_to_keypoints +from detectron2.utils.events import get_event_storage +from detectron2.utils.registry import Registry + +_TOTAL_SKIPPED = 0 + +ROI_KEYPOINT_HEAD_REGISTRY = Registry("ROI_KEYPOINT_HEAD") +ROI_KEYPOINT_HEAD_REGISTRY.__doc__ = """ +Registry for keypoint heads, which make keypoint predictions from per-region features. + +The registered object will be called with `obj(cfg, input_shape)`. +""" + + +def build_keypoint_head(cfg, input_shape): + """ + Build a keypoint head from `cfg.MODEL.ROI_KEYPOINT_HEAD.NAME`. + """ + name = cfg.MODEL.ROI_KEYPOINT_HEAD.NAME + return ROI_KEYPOINT_HEAD_REGISTRY.get(name)(cfg, input_shape) + + +def keypoint_rcnn_loss(pred_keypoint_logits, instances, normalizer): + """ + Arguments: + pred_keypoint_logits (Tensor): A tensor of shape (N, K, S, S) where N is the total number + of instances in the batch, K is the number of keypoints, and S is the side length + of the keypoint heatmap. The values are spatial logits. + instances (list[Instances]): A list of M Instances, where M is the batch size. + These instances are predictions from the model + that are in 1:1 correspondence with pred_keypoint_logits. + Each Instances should contain a `gt_keypoints` field containing a `structures.Keypoint` + instance. + normalizer (float): Normalize the loss by this amount. + If not specified, we normalize by the number of visible keypoints in the minibatch. + + Returns a scalar tensor containing the loss. 
+ """ + heatmaps = [] + valid = [] + + keypoint_side_len = pred_keypoint_logits.shape[2] + for instances_per_image in instances: + if len(instances_per_image) == 0: + continue + keypoints = instances_per_image.gt_keypoints + heatmaps_per_image, valid_per_image = keypoints.to_heatmap( + instances_per_image.proposal_boxes.tensor, keypoint_side_len + ) + heatmaps.append(heatmaps_per_image.view(-1)) + valid.append(valid_per_image.view(-1)) + + if len(heatmaps): + keypoint_targets = cat(heatmaps, dim=0) + valid = cat(valid, dim=0).to(dtype=torch.uint8) + valid = torch.nonzero(valid).squeeze(1) + + # torch.mean (in binary_cross_entropy_with_logits) doesn't + # accept empty tensors, so handle it separately + if len(heatmaps) == 0 or valid.numel() == 0: + global _TOTAL_SKIPPED + _TOTAL_SKIPPED += 1 + storage = get_event_storage() + storage.put_scalar("kpts_num_skipped_batches", _TOTAL_SKIPPED, smoothing_hint=False) + return pred_keypoint_logits.sum() * 0 + + N, K, H, W = pred_keypoint_logits.shape + pred_keypoint_logits = pred_keypoint_logits.view(N * K, H * W) + + keypoint_loss = F.cross_entropy( + pred_keypoint_logits[valid], keypoint_targets[valid], reduction="sum" + ) + + # If a normalizer isn't specified, normalize by the number of visible keypoints in the minibatch + if normalizer is None: + normalizer = valid.numel() + keypoint_loss /= normalizer + + return keypoint_loss + + +def keypoint_rcnn_inference(pred_keypoint_logits, pred_instances): + """ + Post process each predicted keypoint heatmap in `pred_keypoint_logits` into (x, y, score) + and add it to the `pred_instances` as a `pred_keypoints` field. + + Args: + pred_keypoint_logits (Tensor): A tensor of shape (R, K, S, S) where R is the total number + of instances in the batch, K is the number of keypoints, and S is the side length of + the keypoint heatmap. The values are spatial logits. + pred_instances (list[Instances]): A list of N Instances, where N is the number of images. + + Returns: + None. Each element in pred_instances will contain an extra "pred_keypoints" field. + The field is a tensor of shape (#instance, K, 3) where the last + dimension corresponds to (x, y, score). + The scores are larger than 0. + """ + # flatten all bboxes from all images together (list[Boxes] -> Rx4 tensor) + bboxes_flat = cat([b.pred_boxes.tensor for b in pred_instances], dim=0) + + keypoint_results = heatmaps_to_keypoints(pred_keypoint_logits.detach(), bboxes_flat.detach()) + num_instances_per_image = [len(i) for i in pred_instances] + keypoint_results = keypoint_results[:, :, [0, 1, 3]].split(num_instances_per_image, dim=0) + + for keypoint_results_per_image, instances_per_image in zip(keypoint_results, pred_instances): + # keypoint_results_per_image is (num instances)x(num keypoints)x(x, y, score) + instances_per_image.pred_keypoints = keypoint_results_per_image + + +class BaseKeypointRCNNHead(nn.Module): + """ + Implement the basic Keypoint R-CNN losses and inference logic described in :paper:`Mask R-CNN`. + """ + + @configurable + def __init__(self, *, num_keypoints, loss_weight=1.0, loss_normalizer=1.0): + """ + NOTE: this interface is experimental. + + Args: + num_keypoints (int): number of keypoints to predict + loss_weight (float): weight to multiple on the keypoint loss + loss_normalizer (float or str): + If float, divide the loss by `loss_normalizer * #images`. + If 'visible', the loss is normalized by the total number of + visible keypoints across images. 
+ """ + super().__init__() + self.num_keypoints = num_keypoints + self.loss_weight = loss_weight + assert loss_normalizer == "visible" or isinstance(loss_normalizer, float), loss_normalizer + self.loss_normalizer = loss_normalizer + + @classmethod + def from_config(cls, cfg, input_shape): + ret = { + "loss_weight": cfg.MODEL.ROI_KEYPOINT_HEAD.LOSS_WEIGHT, + "num_keypoints": cfg.MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS, + } + normalize_by_visible = ( + cfg.MODEL.ROI_KEYPOINT_HEAD.NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS + ) # noqa + if not normalize_by_visible: + batch_size_per_image = cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE + positive_sample_fraction = cfg.MODEL.ROI_HEADS.POSITIVE_FRACTION + ret["loss_normalizer"] = ( + ret["num_keypoints"] * batch_size_per_image * positive_sample_fraction + ) + else: + ret["loss_normalizer"] = "visible" + return ret + + def forward(self, x, instances: List[Instances]): + """ + Args: + x: input region feature(s) provided by :class:`ROIHeads`. + instances (list[Instances]): contains the boxes & labels corresponding + to the input features. + Exact format is up to its caller to decide. + Typically, this is the foreground instances in training, with + "proposal_boxes" field and other gt annotations. + In inference, it contains boxes that are already predicted. + + Returns: + A dict of losses if in training. The predicted "instances" if in inference. + """ + x = self.layers(x) + if self.training: + num_images = len(instances) + normalizer = ( + None if self.loss_normalizer == "visible" else num_images * self.loss_normalizer + ) + return { + "loss_keypoint": keypoint_rcnn_loss(x, instances, normalizer=normalizer) + * self.loss_weight + } + else: + keypoint_rcnn_inference(x, instances) + return instances + + def layers(self, x): + """ + Neural network layers that makes predictions from regional input features. + """ + raise NotImplementedError + + +@ROI_KEYPOINT_HEAD_REGISTRY.register() +class KRCNNConvDeconvUpsampleHead(BaseKeypointRCNNHead): + """ + A standard keypoint head containing a series of 3x3 convs, followed by + a transpose convolution and bilinear interpolation for upsampling. + """ + + @configurable + def __init__(self, input_shape, *, num_keypoints, conv_dims, **kwargs): + """ + NOTE: this interface is experimental. + + Args: + input_shape (ShapeSpec): shape of the input feature + conv_dims: an iterable of output channel counts for each conv in the head + e.g. (512, 512, 512) for three convs outputting 512 channels. 
+ """ + super().__init__(num_keypoints=num_keypoints, **kwargs) + + # default up_scale to 2 (this can be made an option) + up_scale = 2 + in_channels = input_shape.channels + + self.blocks = [] + for idx, layer_channels in enumerate(conv_dims, 1): + module = Conv2d(in_channels, layer_channels, 3, stride=1, padding=1) + self.add_module("conv_fcn{}".format(idx), module) + self.blocks.append(module) + in_channels = layer_channels + + deconv_kernel = 4 + self.score_lowres = ConvTranspose2d( + in_channels, num_keypoints, deconv_kernel, stride=2, padding=deconv_kernel // 2 - 1 + ) + self.up_scale = up_scale + + for name, param in self.named_parameters(): + if "bias" in name: + nn.init.constant_(param, 0) + elif "weight" in name: + # Caffe2 implementation uses MSRAFill, which in fact + # corresponds to kaiming_normal_ in PyTorch + nn.init.kaiming_normal_(param, mode="fan_out", nonlinearity="relu") + + @classmethod + def from_config(cls, cfg, input_shape): + ret = super().from_config(cfg, input_shape) + ret["input_shape"] = input_shape + ret["conv_dims"] = cfg.MODEL.ROI_KEYPOINT_HEAD.CONV_DIMS + return ret + + def layers(self, x): + for layer in self.blocks: + x = F.relu(layer(x)) + x = self.score_lowres(x) + x = interpolate(x, scale_factor=self.up_scale, mode="bilinear", align_corners=False) + return x diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/mask_head.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..5209722fb96b5e430bb5f30b3fce2b94b91f2b2e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/mask_head.py @@ -0,0 +1,277 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from typing import List +import fvcore.nn.weight_init as weight_init +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import configurable +from detectron2.layers import Conv2d, ConvTranspose2d, ShapeSpec, cat, get_norm +from detectron2.structures import Instances +from detectron2.utils.events import get_event_storage +from detectron2.utils.registry import Registry + +ROI_MASK_HEAD_REGISTRY = Registry("ROI_MASK_HEAD") +ROI_MASK_HEAD_REGISTRY.__doc__ = """ +Registry for mask heads, which predicts instance masks given +per-region features. + +The registered object will be called with `obj(cfg, input_shape)`. +""" + + +def mask_rcnn_loss(pred_mask_logits, instances, vis_period=0): + """ + Compute the mask prediction loss defined in the Mask R-CNN paper. + + Args: + pred_mask_logits (Tensor): A tensor of shape (B, C, Hmask, Wmask) or (B, 1, Hmask, Wmask) + for class-specific or class-agnostic, where B is the total number of predicted masks + in all images, C is the number of foreground classes, and Hmask, Wmask are the height + and width of the mask predictions. The values are logits. + instances (list[Instances]): A list of N Instances, where N is the number of images + in the batch. These instances are in 1:1 + correspondence with the pred_mask_logits. The ground-truth labels (class, box, mask, + ...) associated with each instance are stored in fields. + vis_period (int): the period (in steps) to dump visualization. + + Returns: + mask_loss (Tensor): A scalar tensor containing the loss. 
+ """ + cls_agnostic_mask = pred_mask_logits.size(1) == 1 + total_num_masks = pred_mask_logits.size(0) + mask_side_len = pred_mask_logits.size(2) + assert pred_mask_logits.size(2) == pred_mask_logits.size(3), "Mask prediction must be square!" + + gt_classes = [] + gt_masks = [] + for instances_per_image in instances: + if len(instances_per_image) == 0: + continue + if not cls_agnostic_mask: + gt_classes_per_image = instances_per_image.gt_classes.to(dtype=torch.int64) + gt_classes.append(gt_classes_per_image) + + gt_masks_per_image = instances_per_image.gt_masks.crop_and_resize( + instances_per_image.proposal_boxes.tensor, mask_side_len + ).to(device=pred_mask_logits.device) + # A tensor of shape (N, M, M), N=#instances in the image; M=mask_side_len + gt_masks.append(gt_masks_per_image) + + if len(gt_masks) == 0: + return pred_mask_logits.sum() * 0 + + gt_masks = cat(gt_masks, dim=0) + + if cls_agnostic_mask: + pred_mask_logits = pred_mask_logits[:, 0] + else: + indices = torch.arange(total_num_masks) + gt_classes = cat(gt_classes, dim=0) + pred_mask_logits = pred_mask_logits[indices, gt_classes] + + if gt_masks.dtype == torch.bool: + gt_masks_bool = gt_masks + else: + # Here we allow gt_masks to be float as well (depend on the implementation of rasterize()) + gt_masks_bool = gt_masks > 0.5 + gt_masks = gt_masks.to(dtype=torch.float32) + + # Log the training accuracy (using gt classes and 0.5 threshold) + mask_incorrect = (pred_mask_logits > 0.0) != gt_masks_bool + mask_accuracy = 1 - (mask_incorrect.sum().item() / max(mask_incorrect.numel(), 1.0)) + num_positive = gt_masks_bool.sum().item() + false_positive = (mask_incorrect & ~gt_masks_bool).sum().item() / max( + gt_masks_bool.numel() - num_positive, 1.0 + ) + false_negative = (mask_incorrect & gt_masks_bool).sum().item() / max(num_positive, 1.0) + + storage = get_event_storage() + storage.put_scalar("mask_rcnn/accuracy", mask_accuracy) + storage.put_scalar("mask_rcnn/false_positive", false_positive) + storage.put_scalar("mask_rcnn/false_negative", false_negative) + if vis_period > 0 and storage.iter % vis_period == 0: + pred_masks = pred_mask_logits.sigmoid() + vis_masks = torch.cat([pred_masks, gt_masks], axis=2) + name = "Left: mask prediction; Right: mask GT" + for idx, vis_mask in enumerate(vis_masks): + vis_mask = torch.stack([vis_mask] * 3, axis=0) + storage.put_image(name + f" ({idx})", vis_mask) + + mask_loss = F.binary_cross_entropy_with_logits(pred_mask_logits, gt_masks, reduction="mean") + return mask_loss + + +def mask_rcnn_inference(pred_mask_logits, pred_instances): + """ + Convert pred_mask_logits to estimated foreground probability masks while also + extracting only the masks for the predicted classes in pred_instances. For each + predicted box, the mask of the same class is attached to the instance by adding a + new "pred_masks" field to pred_instances. + + Args: + pred_mask_logits (Tensor): A tensor of shape (B, C, Hmask, Wmask) or (B, 1, Hmask, Wmask) + for class-specific or class-agnostic, where B is the total number of predicted masks + in all images, C is the number of foreground classes, and Hmask, Wmask are the height + and width of the mask predictions. The values are logits. + pred_instances (list[Instances]): A list of N Instances, where N is the number of images + in the batch. Each Instances must have field "pred_classes". + + Returns: + None. pred_instances will contain an extra "pred_masks" field storing a mask of size (Hmask, + Wmask) for predicted class. 
Note that the masks are returned as a soft (non-quantized) + masks the resolution predicted by the network; post-processing steps, such as resizing + the predicted masks to the original image resolution and/or binarizing them, is left + to the caller. + """ + cls_agnostic_mask = pred_mask_logits.size(1) == 1 + + if cls_agnostic_mask: + mask_probs_pred = pred_mask_logits.sigmoid() + else: + # Select masks corresponding to the predicted classes + num_masks = pred_mask_logits.shape[0] + class_pred = cat([i.pred_classes for i in pred_instances]) + indices = torch.arange(num_masks, device=class_pred.device) + mask_probs_pred = pred_mask_logits[indices, class_pred][:, None].sigmoid() + # mask_probs_pred.shape: (B, 1, Hmask, Wmask) + + num_boxes_per_image = [len(i) for i in pred_instances] + mask_probs_pred = mask_probs_pred.split(num_boxes_per_image, dim=0) + + for prob, instances in zip(mask_probs_pred, pred_instances): + instances.pred_masks = prob # (1, Hmask, Wmask) + + +class BaseMaskRCNNHead(nn.Module): + """ + Implement the basic Mask R-CNN losses and inference logic described in :paper:`Mask R-CNN` + """ + + @configurable + def __init__(self, *, vis_period=0): + """ + NOTE: this interface is experimental. + + Args: + vis_period (int): visualization period + """ + super().__init__() + self.vis_period = vis_period + + @classmethod + def from_config(cls, cfg, input_shape): + return {"vis_period": cfg.VIS_PERIOD} + + def forward(self, x, instances: List[Instances]): + """ + Args: + x: input region feature(s) provided by :class:`ROIHeads`. + instances (list[Instances]): contains the boxes & labels corresponding + to the input features. + Exact format is up to its caller to decide. + Typically, this is the foreground instances in training, with + "proposal_boxes" field and other gt annotations. + In inference, it contains boxes that are already predicted. + + Returns: + A dict of losses in training. The predicted "instances" in inference. + """ + x = self.layers(x) + if self.training: + return {"loss_mask": mask_rcnn_loss(x, instances, self.vis_period)} + else: + mask_rcnn_inference(x, instances) + return instances + + def layers(self, x): + """ + Neural network layers that makes predictions from input features. + """ + raise NotImplementedError + + +@ROI_MASK_HEAD_REGISTRY.register() +class MaskRCNNConvUpsampleHead(BaseMaskRCNNHead): + """ + A mask head with several conv layers, plus an upsample layer (with `ConvTranspose2d`). + Predictions are made with a final 1x1 conv layer. + """ + + @configurable + def __init__(self, input_shape: ShapeSpec, *, num_classes, conv_dims, conv_norm="", **kwargs): + """ + NOTE: this interface is experimental. + + Args: + input_shape (ShapeSpec): shape of the input feature + num_classes (int): the number of classes. 1 if using class agnostic prediction. + conv_dims (list[int]): a list of N>0 integers representing the output dimensions + of N-1 conv layers and the last upsample layer. + conv_norm (str or callable): normalization for the conv layers. + See :func:`detectron2.layers.get_norm` for supported types. + """ + super().__init__(**kwargs) + assert len(conv_dims) >= 1, "conv_dims have to be non-empty!" 
+ + self.conv_norm_relus = [] + + cur_channels = input_shape.channels + for k, conv_dim in enumerate(conv_dims[:-1]): + conv = Conv2d( + cur_channels, + conv_dim, + kernel_size=3, + stride=1, + padding=1, + bias=not conv_norm, + norm=get_norm(conv_norm, conv_dim), + activation=F.relu, + ) + self.add_module("mask_fcn{}".format(k + 1), conv) + self.conv_norm_relus.append(conv) + cur_channels = conv_dim + + self.deconv = ConvTranspose2d( + cur_channels, conv_dims[-1], kernel_size=2, stride=2, padding=0 + ) + cur_channels = conv_dims[-1] + + self.predictor = Conv2d(cur_channels, num_classes, kernel_size=1, stride=1, padding=0) + + for layer in self.conv_norm_relus + [self.deconv]: + weight_init.c2_msra_fill(layer) + # use normal distribution initialization for mask prediction layer + nn.init.normal_(self.predictor.weight, std=0.001) + if self.predictor.bias is not None: + nn.init.constant_(self.predictor.bias, 0) + + @classmethod + def from_config(cls, cfg, input_shape): + ret = super().from_config(cfg, input_shape) + conv_dim = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM + num_conv = cfg.MODEL.ROI_MASK_HEAD.NUM_CONV + ret.update( + conv_dims=[conv_dim] * (num_conv + 1), # +1 for ConvTranspose + conv_norm=cfg.MODEL.ROI_MASK_HEAD.NORM, + input_shape=input_shape, + ) + if cfg.MODEL.ROI_MASK_HEAD.CLS_AGNOSTIC_MASK: + ret["num_classes"] = 1 + else: + ret["num_classes"] = cfg.MODEL.ROI_HEADS.NUM_CLASSES + return ret + + def layers(self, x): + for layer in self.conv_norm_relus: + x = layer(x) + x = F.relu(self.deconv(x)) + return self.predictor(x) + + +def build_mask_head(cfg, input_shape): + """ + Build a mask head defined by `cfg.MODEL.ROI_MASK_HEAD.NAME`. + """ + name = cfg.MODEL.ROI_MASK_HEAD.NAME + return ROI_MASK_HEAD_REGISTRY.get(name)(cfg, input_shape) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/roi_heads.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/roi_heads.py new file mode 100644 index 0000000000000000000000000000000000000000..f35588e474a1c3d938e5a3b2b8a8ae5e88006215 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/roi_heads.py @@ -0,0 +1,812 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import inspect +import logging +import numpy as np +from typing import Dict, List, Optional, Tuple, Union +import torch +from torch import nn + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec +from detectron2.structures import Boxes, ImageList, Instances, pairwise_iou +from detectron2.utils.events import get_event_storage +from detectron2.utils.registry import Registry + +from ..backbone.resnet import BottleneckBlock, make_stage +from ..matcher import Matcher +from ..poolers import ROIPooler +from ..proposal_generator.proposal_utils import add_ground_truth_to_proposals +from ..sampling import subsample_labels +from .box_head import build_box_head +from .fast_rcnn import FastRCNNOutputLayers +from .keypoint_head import build_keypoint_head +from .mask_head import build_mask_head + +ROI_HEADS_REGISTRY = Registry("ROI_HEADS") +ROI_HEADS_REGISTRY.__doc__ = """ +Registry for ROI heads in a generalized R-CNN model. +ROIHeads take feature maps and region proposals, and +perform per-region computation. + +The registered object will be called with `obj(cfg, input_shape)`. +The call is expected to return an :class:`ROIHeads`. 
+""" + +logger = logging.getLogger(__name__) + + +def build_roi_heads(cfg, input_shape): + """ + Build ROIHeads defined by `cfg.MODEL.ROI_HEADS.NAME`. + """ + name = cfg.MODEL.ROI_HEADS.NAME + return ROI_HEADS_REGISTRY.get(name)(cfg, input_shape) + + +def select_foreground_proposals( + proposals: List[Instances], bg_label: int +) -> Tuple[List[Instances], List[torch.Tensor]]: + """ + Given a list of N Instances (for N images), each containing a `gt_classes` field, + return a list of Instances that contain only instances with `gt_classes != -1 && + gt_classes != bg_label`. + + Args: + proposals (list[Instances]): A list of N Instances, where N is the number of + images in the batch. + bg_label: label index of background class. + + Returns: + list[Instances]: N Instances, each contains only the selected foreground instances. + list[Tensor]: N boolean vector, correspond to the selection mask of + each Instances object. True for selected instances. + """ + assert isinstance(proposals, (list, tuple)) + assert isinstance(proposals[0], Instances) + assert proposals[0].has("gt_classes") + fg_proposals = [] + fg_selection_masks = [] + for proposals_per_image in proposals: + gt_classes = proposals_per_image.gt_classes + fg_selection_mask = (gt_classes != -1) & (gt_classes != bg_label) + fg_idxs = fg_selection_mask.nonzero().squeeze(1) + fg_proposals.append(proposals_per_image[fg_idxs]) + fg_selection_masks.append(fg_selection_mask) + return fg_proposals, fg_selection_masks + + +def select_proposals_with_visible_keypoints(proposals: List[Instances]) -> List[Instances]: + """ + Args: + proposals (list[Instances]): a list of N Instances, where N is the + number of images. + + Returns: + proposals: only contains proposals with at least one visible keypoint. + + Note that this is still slightly different from Detectron. + In Detectron, proposals for training keypoint head are re-sampled from + all the proposals with IOU>threshold & >=1 visible keypoint. + + Here, the proposals are first sampled from all proposals with + IOU>threshold, then proposals with no visible keypoint are filtered out. + This strategy seems to make no difference on Detectron and is easier to implement. + """ + ret = [] + all_num_fg = [] + for proposals_per_image in proposals: + # If empty/unannotated image (hard negatives), skip filtering for train + if len(proposals_per_image) == 0: + ret.append(proposals_per_image) + continue + gt_keypoints = proposals_per_image.gt_keypoints.tensor + # #fg x K x 3 + vis_mask = gt_keypoints[:, :, 2] >= 1 + xs, ys = gt_keypoints[:, :, 0], gt_keypoints[:, :, 1] + proposal_boxes = proposals_per_image.proposal_boxes.tensor.unsqueeze(dim=1) # #fg x 1 x 4 + kp_in_box = ( + (xs >= proposal_boxes[:, :, 0]) + & (xs <= proposal_boxes[:, :, 2]) + & (ys >= proposal_boxes[:, :, 1]) + & (ys <= proposal_boxes[:, :, 3]) + ) + selection = (kp_in_box & vis_mask).any(dim=1) + selection_idxs = torch.nonzero(selection, as_tuple=True)[0] + all_num_fg.append(selection_idxs.numel()) + ret.append(proposals_per_image[selection_idxs]) + + storage = get_event_storage() + storage.put_scalar("keypoint_head/num_fg_samples", np.mean(all_num_fg)) + return ret + + +class ROIHeads(torch.nn.Module): + """ + ROIHeads perform all per-region computation in an R-CNN. + + It typically contains logic to + 1. (in training only) match proposals with ground truth and sample them + 2. crop the regions and extract per-region features using proposals + 3. 
make per-region predictions with different heads + + It can have many variants, implemented as subclasses of this class. + This base class contains the logic to match/sample proposals. + But it is not necessary to inherit this class if the sampling logic is not needed. + """ + + @configurable + def __init__( + self, + *, + num_classes, + batch_size_per_image, + positive_sample_fraction, + proposal_matcher, + proposal_append_gt=True + ): + """ + NOTE: this interface is experimental. + + Args: + num_classes (int): number of classes. Used to label background proposals. + batch_size_per_image (int): number of proposals to use for training + positive_sample_fraction (float): fraction of positive (foreground) proposals + to use for training. + proposal_matcher (Matcher): matcher that matches proposals and ground truth + proposal_append_gt (bool): whether to include ground truth as proposals as well + """ + super().__init__() + self.batch_size_per_image = batch_size_per_image + self.positive_sample_fraction = positive_sample_fraction + self.num_classes = num_classes + self.proposal_matcher = proposal_matcher + self.proposal_append_gt = proposal_append_gt + + @classmethod + def from_config(cls, cfg): + return { + "batch_size_per_image": cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE, + "positive_sample_fraction": cfg.MODEL.ROI_HEADS.POSITIVE_FRACTION, + "num_classes": cfg.MODEL.ROI_HEADS.NUM_CLASSES, + "proposal_append_gt": cfg.MODEL.ROI_HEADS.PROPOSAL_APPEND_GT, + # Matcher to assign box proposals to gt boxes + "proposal_matcher": Matcher( + cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS, + cfg.MODEL.ROI_HEADS.IOU_LABELS, + allow_low_quality_matches=False, + ), + } + + def _sample_proposals( + self, matched_idxs: torch.Tensor, matched_labels: torch.Tensor, gt_classes: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Based on the matching between N proposals and M groundtruth, + sample the proposals and set their classification labels. + + Args: + matched_idxs (Tensor): a vector of length N, each is the best-matched + gt index in [0, M) for each proposal. + matched_labels (Tensor): a vector of length N, the matcher's label + (one of cfg.MODEL.ROI_HEADS.IOU_LABELS) for each proposal. + gt_classes (Tensor): a vector of length M. + + Returns: + Tensor: a vector of indices of sampled proposals. Each is in [0, N). + Tensor: a vector of the same length, the classification label for + each sampled proposal. Each sample is labeled as either a category in + [0, num_classes) or the background (num_classes). + """ + has_gt = gt_classes.numel() > 0 + # Get the corresponding GT for each proposal + if has_gt: + gt_classes = gt_classes[matched_idxs] + # Label unmatched proposals (0 label from matcher) as background (label=num_classes) + gt_classes[matched_labels == 0] = self.num_classes + # Label ignore proposals (-1 label) + gt_classes[matched_labels == -1] = -1 + else: + gt_classes = torch.zeros_like(matched_idxs) + self.num_classes + + sampled_fg_idxs, sampled_bg_idxs = subsample_labels( + gt_classes, self.batch_size_per_image, self.positive_sample_fraction, self.num_classes + ) + + sampled_idxs = torch.cat([sampled_fg_idxs, sampled_bg_idxs], dim=0) + return sampled_idxs, gt_classes[sampled_idxs] + + @torch.no_grad() + def label_and_sample_proposals( + self, proposals: List[Instances], targets: List[Instances] + ) -> List[Instances]: + """ + Prepare some proposals to be used to train the ROI heads. + It performs box matching between `proposals` and `targets`, and assigns + training labels to the proposals. 
+ It returns ``self.batch_size_per_image`` random samples from proposals and groundtruth + boxes, with a fraction of positives that is no larger than + ``self.positive_sample_fraction``. + + Args: + See :meth:`ROIHeads.forward` + + Returns: + list[Instances]: + length `N` list of `Instances`s containing the proposals + sampled for training. Each `Instances` has the following fields: + + - proposal_boxes: the proposal boxes + - gt_boxes: the ground-truth box that the proposal is assigned to + (this is only meaningful if the proposal has a label > 0; if label = 0 + then the ground-truth box is random) + + Other fields such as "gt_classes", "gt_masks", that's included in `targets`. + """ + gt_boxes = [x.gt_boxes for x in targets] + # Augment proposals with ground-truth boxes. + # In the case of learned proposals (e.g., RPN), when training starts + # the proposals will be low quality due to random initialization. + # It's possible that none of these initial + # proposals have high enough overlap with the gt objects to be used + # as positive examples for the second stage components (box head, + # cls head, mask head). Adding the gt boxes to the set of proposals + # ensures that the second stage components will have some positive + # examples from the start of training. For RPN, this augmentation improves + # convergence and empirically improves box AP on COCO by about 0.5 + # points (under one tested configuration). + if self.proposal_append_gt: + proposals = add_ground_truth_to_proposals(gt_boxes, proposals) + + proposals_with_gt = [] + + num_fg_samples = [] + num_bg_samples = [] + for proposals_per_image, targets_per_image in zip(proposals, targets): + has_gt = len(targets_per_image) > 0 + match_quality_matrix = pairwise_iou( + targets_per_image.gt_boxes, proposals_per_image.proposal_boxes + ) + matched_idxs, matched_labels = self.proposal_matcher(match_quality_matrix) + sampled_idxs, gt_classes = self._sample_proposals( + matched_idxs, matched_labels, targets_per_image.gt_classes + ) + + # Set target attributes of the sampled proposals: + proposals_per_image = proposals_per_image[sampled_idxs] + proposals_per_image.gt_classes = gt_classes + + # We index all the attributes of targets that start with "gt_" + # and have not been added to proposals yet (="gt_classes"). + if has_gt: + sampled_targets = matched_idxs[sampled_idxs] + # NOTE: here the indexing waste some compute, because heads + # like masks, keypoints, etc, will filter the proposals again, + # (by foreground/background, or number of keypoints in the image, etc) + # so we essentially index the data twice. 
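The matching step that `label_and_sample_proposals` performs before subsampling is a plain IoU match between ground-truth boxes and proposals, using the `pairwise_iou` and `Matcher` utilities imported at the top of this file. A minimal sketch with toy boxes (the coordinates and the single 0.5 threshold are assumptions chosen to mirror the default `cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS` / `IOU_LABELS`):

```python
import torch
from detectron2.structures import Boxes, pairwise_iou
from detectron2.modeling.matcher import Matcher

gt_boxes = Boxes(torch.tensor([[0., 0., 10., 10.],
                               [20., 20., 30., 30.]]))
proposal_boxes = Boxes(torch.tensor([[1., 1., 10., 10.],
                                     [18., 18., 28., 28.],
                                     [50., 50., 60., 60.]]))

match_quality_matrix = pairwise_iou(gt_boxes, proposal_boxes)  # shape (2, 3): gt x proposals
matcher = Matcher([0.5], [0, 1], allow_low_quality_matches=False)
matched_idxs, matched_labels = matcher(match_quality_matrix)
print(matched_idxs)    # best-matching gt index for every proposal
print(matched_labels)  # 1 = foreground, 0 = background
```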
+ for (trg_name, trg_value) in targets_per_image.get_fields().items(): + if trg_name.startswith("gt_") and not proposals_per_image.has(trg_name): + proposals_per_image.set(trg_name, trg_value[sampled_targets]) + else: + gt_boxes = Boxes( + targets_per_image.gt_boxes.tensor.new_zeros((len(sampled_idxs), 4)) + ) + proposals_per_image.gt_boxes = gt_boxes + + num_bg_samples.append((gt_classes == self.num_classes).sum().item()) + num_fg_samples.append(gt_classes.numel() - num_bg_samples[-1]) + proposals_with_gt.append(proposals_per_image) + + # Log the number of fg/bg samples that are selected for training ROI heads + storage = get_event_storage() + storage.put_scalar("roi_head/num_fg_samples", np.mean(num_fg_samples)) + storage.put_scalar("roi_head/num_bg_samples", np.mean(num_bg_samples)) + + return proposals_with_gt + + def forward( + self, + images: ImageList, + features: Dict[str, torch.Tensor], + proposals: List[Instances], + targets: Optional[List[Instances]] = None, + ) -> Tuple[List[Instances], Dict[str, torch.Tensor]]: + """ + Args: + images (ImageList): + features (dict[str,Tensor]): input data as a mapping from feature + map name to tensor. Axis 0 represents the number of images `N` in + the input data; axes 1-3 are channels, height, and width, which may + vary between feature maps (e.g., if a feature pyramid is used). + proposals (list[Instances]): length `N` list of `Instances`. The i-th + `Instances` contains object proposals for the i-th input image, + with fields "proposal_boxes" and "objectness_logits". + targets (list[Instances], optional): length `N` list of `Instances`. The i-th + `Instances` contains the ground-truth per-instance annotations + for the i-th input image. Specify `targets` during training only. + It may have the following fields: + + - gt_boxes: the bounding box of each instance. + - gt_classes: the label for each instance with a category ranging in [0, #class]. + - gt_masks: PolygonMasks or BitMasks, the ground-truth masks of each instance. + - gt_keypoints: NxKx3, the groud-truth keypoints for each instance. + + Returns: + list[Instances]: length `N` list of `Instances` containing the + detected instances. Returned during inference only; may be [] during training. + + dict[str->Tensor]: + mapping from a named loss to a tensor storing the loss. Used during training only. + """ + raise NotImplementedError() + + +@ROI_HEADS_REGISTRY.register() +class Res5ROIHeads(ROIHeads): + """ + The ROIHeads in a typical "C4" R-CNN model, where + the box and mask head share the cropping and + the per-region feature computation by a Res5 block. 
+ """ + + def __init__(self, cfg, input_shape): + super().__init__(cfg) + + # fmt: off + self.in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION + pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE + pooler_scales = (1.0 / input_shape[self.in_features[0]].stride, ) + sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO + self.mask_on = cfg.MODEL.MASK_ON + # fmt: on + assert not cfg.MODEL.KEYPOINT_ON + assert len(self.in_features) == 1 + + self.pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + + self.res5, out_channels = self._build_res5_block(cfg) + self.box_predictor = FastRCNNOutputLayers( + cfg, ShapeSpec(channels=out_channels, height=1, width=1) + ) + + if self.mask_on: + self.mask_head = build_mask_head( + cfg, + ShapeSpec(channels=out_channels, width=pooler_resolution, height=pooler_resolution), + ) + + def _build_res5_block(self, cfg): + # fmt: off + stage_channel_factor = 2 ** 3 # res5 is 8x res2 + num_groups = cfg.MODEL.RESNETS.NUM_GROUPS + width_per_group = cfg.MODEL.RESNETS.WIDTH_PER_GROUP + bottleneck_channels = num_groups * width_per_group * stage_channel_factor + out_channels = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS * stage_channel_factor + stride_in_1x1 = cfg.MODEL.RESNETS.STRIDE_IN_1X1 + norm = cfg.MODEL.RESNETS.NORM + assert not cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE[-1], \ + "Deformable conv is not yet supported in res5 head." + # fmt: on + + blocks = make_stage( + BottleneckBlock, + 3, + first_stride=2, + in_channels=out_channels // 2, + bottleneck_channels=bottleneck_channels, + out_channels=out_channels, + num_groups=num_groups, + norm=norm, + stride_in_1x1=stride_in_1x1, + ) + return nn.Sequential(*blocks), out_channels + + def _shared_roi_transform(self, features, boxes): + x = self.pooler(features, boxes) + return self.res5(x) + + def forward(self, images, features, proposals, targets=None): + """ + See :meth:`ROIHeads.forward`. + """ + del images + + if self.training: + assert targets + proposals = self.label_and_sample_proposals(proposals, targets) + del targets + + proposal_boxes = [x.proposal_boxes for x in proposals] + box_features = self._shared_roi_transform( + [features[f] for f in self.in_features], proposal_boxes + ) + predictions = self.box_predictor(box_features.mean(dim=[2, 3])) + + if self.training: + del features + losses = self.box_predictor.losses(predictions, proposals) + if self.mask_on: + proposals, fg_selection_masks = select_foreground_proposals( + proposals, self.num_classes + ) + # Since the ROI feature transform is shared between boxes and masks, + # we don't need to recompute features. The mask loss is only defined + # on foreground proposals, so we need to select out the foreground + # features. + mask_features = box_features[torch.cat(fg_selection_masks, dim=0)] + del box_features + losses.update(self.mask_head(mask_features, proposals)) + return [], losses + else: + pred_instances, _ = self.box_predictor.inference(predictions, proposals) + pred_instances = self.forward_with_given_boxes(features, pred_instances) + return pred_instances, {} + + def forward_with_given_boxes(self, features, instances): + """ + Use the given boxes in `instances` to produce other (non-box) per-ROI outputs. + + Args: + features: same as in `forward()` + instances (list[Instances]): instances to predict other outputs. Expect the keys + "pred_boxes" and "pred_classes" to exist. 
+
+        Returns:
+            instances (Instances):
+                the same `Instances` object, with extra
+                fields such as `pred_masks` or `pred_keypoints`.
+        """
+        assert not self.training
+        assert instances[0].has("pred_boxes") and instances[0].has("pred_classes")
+
+        if self.mask_on:
+            features = [features[f] for f in self.in_features]
+            x = self._shared_roi_transform(features, [x.pred_boxes for x in instances])
+            return self.mask_head(x, instances)
+        else:
+            return instances
+
+
+@ROI_HEADS_REGISTRY.register()
+class StandardROIHeads(ROIHeads):
+    """
+    It's "standard" in the sense that there is no ROI transform sharing
+    or feature sharing between tasks.
+    Each head independently processes its input features with its
+    own pooler and head.
+
+    This class is used by most models, such as FPN and C5.
+    To implement more models, you can subclass it and implement a different
+    :meth:`forward()` or a head.
+    """
+
+    @configurable
+    def __init__(
+        self,
+        *,
+        box_in_features: List[str],
+        box_pooler: ROIPooler,
+        box_head: nn.Module,
+        box_predictor: nn.Module,
+        mask_in_features: Optional[List[str]] = None,
+        mask_pooler: Optional[ROIPooler] = None,
+        mask_head: Optional[nn.Module] = None,
+        keypoint_in_features: Optional[List[str]] = None,
+        keypoint_pooler: Optional[ROIPooler] = None,
+        keypoint_head: Optional[nn.Module] = None,
+        train_on_pred_boxes: bool = False,
+        **kwargs
+    ):
+        """
+        NOTE: this interface is experimental.
+
+        Args:
+            box_in_features (list[str]): list of feature names to use for the box head.
+            box_pooler (ROIPooler): pooler to extract region features for the box head
+            box_head (nn.Module): transform features to make box predictions
+            box_predictor (nn.Module): make box predictions from the feature.
+                Should have the same interface as :class:`FastRCNNOutputLayers`.
+            mask_in_features (list[str]): list of feature names to use for the mask head.
+                None if not using mask head.
+            mask_pooler (ROIPooler): pooler to extract region features for the mask head
+            mask_head (nn.Module): transform features to make mask predictions
+            keypoint_in_features, keypoint_pooler, keypoint_head: similar to ``mask*``.
+            train_on_pred_boxes (bool): whether to use proposal boxes or
+                predicted boxes from the box head to train other heads.
+        """
+        super().__init__(**kwargs)
+        # keep self.in_features for backward compatibility
+        self.in_features = self.box_in_features = box_in_features
+        self.box_pooler = box_pooler
+        self.box_head = box_head
+        self.box_predictor = box_predictor
+
+        self.mask_on = mask_in_features is not None
+        if self.mask_on:
+            self.mask_in_features = mask_in_features
+            self.mask_pooler = mask_pooler
+            self.mask_head = mask_head
+        self.keypoint_on = keypoint_in_features is not None
+        if self.keypoint_on:
+            self.keypoint_in_features = keypoint_in_features
+            self.keypoint_pooler = keypoint_pooler
+            self.keypoint_head = keypoint_head
+
+        self.train_on_pred_boxes = train_on_pred_boxes
+
+    @classmethod
+    def from_config(cls, cfg, input_shape):
+        ret = super().from_config(cfg)
+        ret["train_on_pred_boxes"] = cfg.MODEL.ROI_BOX_HEAD.TRAIN_ON_PRED_BOXES
+        # Subclasses that have not been updated to use from_config style construction
+        # may have overridden _init_*_head methods. In this case, those overridden methods
+        # will not be classmethods and we need to avoid trying to call them here.
+        # We test for this with ismethod which only returns True for bound methods of cls.
+        # Such subclasses will need to handle calling their overridden _init_*_head methods.
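+        # As an illustrative sketch only (an assumption about usage, not something this
+        # patch defines), a from_config-style subclass would override one of these
+        # classmethods and reuse the parent's dict of constructor arguments, e.g.:
+        #
+        #     @classmethod
+        #     def _init_box_head(cls, cfg, input_shape):
+        #         ret = super()._init_box_head(cfg, input_shape)
+        #         # MyBoxPredictor is a hypothetical drop-in for FastRCNNOutputLayers
+        #         ret["box_predictor"] = MyBoxPredictor(cfg, ret["box_head"].output_shape)
+        #         return ret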
+ if inspect.ismethod(cls._init_box_head): + ret.update(cls._init_box_head(cfg, input_shape)) + if inspect.ismethod(cls._init_mask_head): + ret.update(cls._init_mask_head(cfg, input_shape)) + if inspect.ismethod(cls._init_keypoint_head): + ret.update(cls._init_keypoint_head(cfg, input_shape)) + return ret + + @classmethod + def _init_box_head(cls, cfg, input_shape): + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) + sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE + # fmt: on + + # If StandardROIHeads is applied on multiple feature maps (as in FPN), + # then we share the same predictors and therefore the channel counts must be the same + in_channels = [input_shape[f].channels for f in in_features] + # Check all channel counts are equal + assert len(set(in_channels)) == 1, in_channels + in_channels = in_channels[0] + + box_pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + # Here we split "box head" and "box predictor", which is mainly due to historical reasons. + # They are used together so the "box predictor" layers should be part of the "box head". + # New subclasses of ROIHeads do not need "box predictor"s. + box_head = build_box_head( + cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution) + ) + box_predictor = FastRCNNOutputLayers(cfg, box_head.output_shape) + return { + "box_in_features": in_features, + "box_pooler": box_pooler, + "box_head": box_head, + "box_predictor": box_predictor, + } + + @classmethod + def _init_mask_head(cls, cfg, input_shape): + if not cfg.MODEL.MASK_ON: + return {} + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) + sampling_ratio = cfg.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_MASK_HEAD.POOLER_TYPE + # fmt: on + + in_channels = [input_shape[f].channels for f in in_features][0] + + ret = {"mask_in_features": in_features} + ret["mask_pooler"] = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + ret["mask_head"] = build_mask_head( + cfg, ShapeSpec(channels=in_channels, width=pooler_resolution, height=pooler_resolution) + ) + return ret + + @classmethod + def _init_keypoint_head(cls, cfg, input_shape): + if not cfg.MODEL.KEYPOINT_ON: + return {} + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) # noqa + sampling_ratio = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_TYPE + # fmt: on + + in_channels = [input_shape[f].channels for f in in_features][0] + + ret = {"keypoint_in_features": in_features} + ret["keypoint_pooler"] = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + ret["keypoint_head"] = build_keypoint_head( + cfg, ShapeSpec(channels=in_channels, width=pooler_resolution, height=pooler_resolution) + ) + return ret + + def forward( + self, + images: ImageList, + features: Dict[str, 
torch.Tensor], + proposals: List[Instances], + targets: Optional[List[Instances]] = None, + ) -> Tuple[List[Instances], Dict[str, torch.Tensor]]: + """ + See :class:`ROIHeads.forward`. + """ + del images + if self.training: + assert targets + proposals = self.label_and_sample_proposals(proposals, targets) + del targets + + if self.training: + losses = self._forward_box(features, proposals) + # Usually the original proposals used by the box head are used by the mask, keypoint + # heads. But when `self.train_on_pred_boxes is True`, proposals will contain boxes + # predicted by the box head. + losses.update(self._forward_mask(features, proposals)) + losses.update(self._forward_keypoint(features, proposals)) + return proposals, losses + else: + pred_instances = self._forward_box(features, proposals) + # During inference cascaded prediction is used: the mask and keypoints heads are only + # applied to the top scoring box detections. + pred_instances = self.forward_with_given_boxes(features, pred_instances) + return pred_instances, {} + + def forward_with_given_boxes( + self, features: Dict[str, torch.Tensor], instances: List[Instances] + ) -> List[Instances]: + """ + Use the given boxes in `instances` to produce other (non-box) per-ROI outputs. + + This is useful for downstream tasks where a box is known, but need to obtain + other attributes (outputs of other heads). + Test-time augmentation also uses this. + + Args: + features: same as in `forward()` + instances (list[Instances]): instances to predict other outputs. Expect the keys + "pred_boxes" and "pred_classes" to exist. + + Returns: + instances (list[Instances]): + the same `Instances` objects, with extra + fields such as `pred_masks` or `pred_keypoints`. + """ + assert not self.training + assert instances[0].has("pred_boxes") and instances[0].has("pred_classes") + + instances = self._forward_mask(features, instances) + instances = self._forward_keypoint(features, instances) + return instances + + def _forward_box( + self, features: Dict[str, torch.Tensor], proposals: List[Instances] + ) -> Union[Dict[str, torch.Tensor], List[Instances]]: + """ + Forward logic of the box prediction branch. If `self.train_on_pred_boxes is True`, + the function puts predicted boxes in the `proposal_boxes` field of `proposals` argument. + + Args: + features (dict[str, Tensor]): mapping from feature map names to tensor. + Same as in :meth:`ROIHeads.forward`. + proposals (list[Instances]): the per-image object proposals with + their matching ground truth. + Each has fields "proposal_boxes", and "objectness_logits", + "gt_classes", "gt_boxes". + + Returns: + In training, a dict of losses. + In inference, a list of `Instances`, the predicted instances. + """ + features = [features[f] for f in self.box_in_features] + box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals]) + box_features = self.box_head(box_features) + predictions = self.box_predictor(box_features) + del box_features + + if self.training: + losses = self.box_predictor.losses(predictions, proposals) + # proposals is modified in-place below, so losses must be computed first. 
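+            # This branch is controlled by cfg.MODEL.ROI_BOX_HEAD.TRAIN_ON_PRED_BOXES
+            # (read in from_config above). When enabled, the boxes predicted by the box
+            # head overwrite the proposal boxes, so the mask/keypoint heads are then
+            # trained on these predicted boxes instead of the original proposals.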
+ if self.train_on_pred_boxes: + with torch.no_grad(): + pred_boxes = self.box_predictor.predict_boxes_for_gt_classes( + predictions, proposals + ) + for proposals_per_image, pred_boxes_per_image in zip(proposals, pred_boxes): + proposals_per_image.proposal_boxes = Boxes(pred_boxes_per_image) + return losses + else: + pred_instances, _ = self.box_predictor.inference(predictions, proposals) + return pred_instances + + def _forward_mask( + self, features: Dict[str, torch.Tensor], instances: List[Instances] + ) -> Union[Dict[str, torch.Tensor], List[Instances]]: + """ + Forward logic of the mask prediction branch. + + Args: + features (dict[str, Tensor]): mapping from feature map names to tensor. + Same as in :meth:`ROIHeads.forward`. + instances (list[Instances]): the per-image instances to train/predict masks. + In training, they can be the proposals. + In inference, they can be the predicted boxes. + + Returns: + In training, a dict of losses. + In inference, update `instances` with new fields "pred_masks" and return it. + """ + if not self.mask_on: + return {} if self.training else instances + + features = [features[f] for f in self.mask_in_features] + + if self.training: + # The loss is only defined on positive proposals. + proposals, _ = select_foreground_proposals(instances, self.num_classes) + proposal_boxes = [x.proposal_boxes for x in proposals] + mask_features = self.mask_pooler(features, proposal_boxes) + return self.mask_head(mask_features, proposals) + else: + pred_boxes = [x.pred_boxes for x in instances] + mask_features = self.mask_pooler(features, pred_boxes) + return self.mask_head(mask_features, instances) + + def _forward_keypoint( + self, features: Dict[str, torch.Tensor], instances: List[Instances] + ) -> Union[Dict[str, torch.Tensor], List[Instances]]: + """ + Forward logic of the keypoint prediction branch. + + Args: + features (dict[str, Tensor]): mapping from feature map names to tensor. + Same as in :meth:`ROIHeads.forward`. + instances (list[Instances]): the per-image instances to train/predict keypoints. + In training, they can be the proposals. + In inference, they can be the predicted boxes. + + Returns: + In training, a dict of losses. + In inference, update `instances` with new fields "pred_keypoints" and return it. + """ + if not self.keypoint_on: + return {} if self.training else instances + + features = [features[f] for f in self.keypoint_in_features] + + if self.training: + # The loss is defined on positive proposals with >=1 visible keypoints. 
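+            # Two filtering steps follow: select_foreground_proposals drops background
+            # proposals (those with gt_classes == num_classes), then
+            # select_proposals_with_visible_keypoints keeps only proposals whose matched
+            # ground truth has at least one visible keypoint.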
+ proposals, _ = select_foreground_proposals(instances, self.num_classes) + proposals = select_proposals_with_visible_keypoints(proposals) + proposal_boxes = [x.proposal_boxes for x in proposals] + + keypoint_features = self.keypoint_pooler(features, proposal_boxes) + return self.keypoint_head(keypoint_features, proposals) + else: + pred_boxes = [x.pred_boxes for x in instances] + keypoint_features = self.keypoint_pooler(features, pred_boxes) + return self.keypoint_head(keypoint_features, instances) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/rotated_fast_rcnn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/rotated_fast_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..3d7362d93f9be8d3838c477406540603e81ee0be --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/roi_heads/rotated_fast_rcnn.py @@ -0,0 +1,276 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import numpy as np +import torch + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec, batched_nms_rotated +from detectron2.structures import Instances, RotatedBoxes, pairwise_iou_rotated +from detectron2.utils.events import get_event_storage + +from ..box_regression import Box2BoxTransformRotated +from ..poolers import ROIPooler +from ..proposal_generator.proposal_utils import add_ground_truth_to_proposals +from .box_head import build_box_head +from .fast_rcnn import FastRCNNOutputLayers +from .roi_heads import ROI_HEADS_REGISTRY, StandardROIHeads + +logger = logging.getLogger(__name__) + +""" +Shape shorthand in this module: + + N: number of images in the minibatch + R: number of ROIs, combined over all images, in the minibatch + Ri: number of ROIs in image i + K: number of foreground classes. E.g.,there are 80 foreground classes in COCO. + +Naming convention: + + deltas: refers to the 5-d (dx, dy, dw, dh, da) deltas that parameterize the box2box + transform (see :class:`box_regression.Box2BoxTransformRotated`). + + pred_class_logits: predicted class scores in [-inf, +inf]; use + softmax(pred_class_logits) to estimate P(class). + + gt_classes: ground-truth classification labels in [0, K], where [0, K) represent + foreground object classes and K represents the background class. + + pred_proposal_deltas: predicted rotated box2box transform deltas for transforming proposals + to detection box predictions. + + gt_proposal_deltas: ground-truth rotated box2box transform deltas +""" + + +def fast_rcnn_inference_rotated( + boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image +): + """ + Call `fast_rcnn_inference_single_image_rotated` for all images. + + Args: + boxes (list[Tensor]): A list of Tensors of predicted class-specific or class-agnostic + boxes for each image. Element i has shape (Ri, K * 5) if doing + class-specific regression, or (Ri, 5) if doing class-agnostic + regression, where Ri is the number of predicted objects for image i. + This is compatible with the output of :meth:`FastRCNNOutputs.predict_boxes`. + scores (list[Tensor]): A list of Tensors of predicted class scores for each image. + Element i has shape (Ri, K + 1), where Ri is the number of predicted objects + for image i. Compatible with the output of :meth:`FastRCNNOutputs.predict_probs`. + image_shapes (list[tuple]): A list of (width, height) tuples for each image in the batch. 
+ score_thresh (float): Only return detections with a confidence score exceeding this + threshold. + nms_thresh (float): The threshold to use for box non-maximum suppression. Value in [0, 1]. + topk_per_image (int): The number of top scoring detections to return. Set < 0 to return + all detections. + + Returns: + instances: (list[Instances]): A list of N instances, one for each image in the batch, + that stores the topk most confidence detections. + kept_indices: (list[Tensor]): A list of 1D tensor of length of N, each element indicates + the corresponding boxes/scores index in [0, Ri) from the input, for image i. + """ + result_per_image = [ + fast_rcnn_inference_single_image_rotated( + boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image + ) + for scores_per_image, boxes_per_image, image_shape in zip(scores, boxes, image_shapes) + ] + return [x[0] for x in result_per_image], [x[1] for x in result_per_image] + + +def fast_rcnn_inference_single_image_rotated( + boxes, scores, image_shape, score_thresh, nms_thresh, topk_per_image +): + """ + Single-image inference. Return rotated bounding-box detection results by thresholding + on scores and applying rotated non-maximum suppression (Rotated NMS). + + Args: + Same as `fast_rcnn_inference_rotated`, but with rotated boxes, scores, and image shapes + per image. + + Returns: + Same as `fast_rcnn_inference_rotated`, but for only one image. + """ + valid_mask = torch.isfinite(boxes).all(dim=1) & torch.isfinite(scores).all(dim=1) + if not valid_mask.all(): + boxes = boxes[valid_mask] + scores = scores[valid_mask] + + B = 5 # box dimension + scores = scores[:, :-1] + num_bbox_reg_classes = boxes.shape[1] // B + # Convert to Boxes to use the `clip` function ... + boxes = RotatedBoxes(boxes.reshape(-1, B)) + boxes.clip(image_shape) + boxes = boxes.tensor.view(-1, num_bbox_reg_classes, B) # R x C x B + # Filter results based on detection scores + filter_mask = scores > score_thresh # R x K + # R' x 2. First column contains indices of the R predictions; + # Second column contains indices of classes. + filter_inds = filter_mask.nonzero() + if num_bbox_reg_classes == 1: + boxes = boxes[filter_inds[:, 0], 0] + else: + boxes = boxes[filter_mask] + scores = scores[filter_mask] + + # Apply per-class Rotated NMS + keep = batched_nms_rotated(boxes, scores, filter_inds[:, 1], nms_thresh) + if topk_per_image >= 0: + keep = keep[:topk_per_image] + boxes, scores, filter_inds = boxes[keep], scores[keep], filter_inds[keep] + + result = Instances(image_shape) + result.pred_boxes = RotatedBoxes(boxes) + result.scores = scores + result.pred_classes = filter_inds[:, 1] + + return result, filter_inds[:, 0] + + +class RotatedFastRCNNOutputLayers(FastRCNNOutputLayers): + """ + Two linear layers for predicting Rotated Fast R-CNN outputs. + """ + + @classmethod + def from_config(cls, cfg, input_shape): + args = super().from_config(cfg, input_shape) + args["box2box_transform"] = Box2BoxTransformRotated( + weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS + ) + return args + + def inference(self, predictions, proposals): + """ + Returns: + list[Instances]: same as `fast_rcnn_inference_rotated`. + list[Tensor]: same as `fast_rcnn_inference_rotated`. 
+ """ + boxes = self.predict_boxes(predictions, proposals) + scores = self.predict_probs(predictions, proposals) + image_shapes = [x.image_size for x in proposals] + + return fast_rcnn_inference_rotated( + boxes, + scores, + image_shapes, + self.test_score_thresh, + self.test_nms_thresh, + self.test_topk_per_image, + ) + + +@ROI_HEADS_REGISTRY.register() +class RROIHeads(StandardROIHeads): + """ + This class is used by Rotated Fast R-CNN to detect rotated boxes. + For now, it only supports box predictions but not mask or keypoints. + """ + + @configurable + def __init__(self, **kwargs): + """ + NOTE: this interface is experimental. + """ + super().__init__(**kwargs) + assert ( + not self.mask_on and not self.keypoint_on + ), "Mask/Keypoints not supported in Rotated ROIHeads." + assert not self.train_on_pred_boxes, "train_on_pred_boxes not implemented for RROIHeads!" + + @classmethod + def _init_box_head(cls, cfg, input_shape): + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) + sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE + # fmt: on + assert pooler_type in ["ROIAlignRotated"], pooler_type + # assume all channel counts are equal + in_channels = [input_shape[f].channels for f in in_features][0] + + box_pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + box_head = build_box_head( + cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution) + ) + # This line is the only difference v.s. StandardROIHeads + box_predictor = RotatedFastRCNNOutputLayers(cfg, box_head.output_shape) + return { + "box_in_features": in_features, + "box_pooler": box_pooler, + "box_head": box_head, + "box_predictor": box_predictor, + } + + @torch.no_grad() + def label_and_sample_proposals(self, proposals, targets): + """ + Prepare some proposals to be used to train the RROI heads. + It performs box matching between `proposals` and `targets`, and assigns + training labels to the proposals. + It returns `self.batch_size_per_image` random samples from proposals and groundtruth boxes, + with a fraction of positives that is no larger than `self.positive_sample_fraction. + + Args: + See :meth:`StandardROIHeads.forward` + + Returns: + list[Instances]: length `N` list of `Instances`s containing the proposals + sampled for training. 
Each `Instances` has the following fields:
+                - proposal_boxes: the rotated proposal boxes
+                - gt_boxes: the ground-truth rotated boxes that the proposal is assigned to
+                  (this is only meaningful if the proposal has a label > 0; if label = 0
+                   then the ground-truth box is random)
+                - gt_classes: the ground-truth classification label for each proposal
+        """
+        gt_boxes = [x.gt_boxes for x in targets]
+        if self.proposal_append_gt:
+            proposals = add_ground_truth_to_proposals(gt_boxes, proposals)
+
+        proposals_with_gt = []
+
+        num_fg_samples = []
+        num_bg_samples = []
+        for proposals_per_image, targets_per_image in zip(proposals, targets):
+            has_gt = len(targets_per_image) > 0
+            match_quality_matrix = pairwise_iou_rotated(
+                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes
+            )
+            matched_idxs, matched_labels = self.proposal_matcher(match_quality_matrix)
+            sampled_idxs, gt_classes = self._sample_proposals(
+                matched_idxs, matched_labels, targets_per_image.gt_classes
+            )
+
+            proposals_per_image = proposals_per_image[sampled_idxs]
+            proposals_per_image.gt_classes = gt_classes
+
+            if has_gt:
+                sampled_targets = matched_idxs[sampled_idxs]
+                proposals_per_image.gt_boxes = targets_per_image.gt_boxes[sampled_targets]
+            else:
+                gt_boxes = RotatedBoxes(
+                    targets_per_image.gt_boxes.tensor.new_zeros((len(sampled_idxs), 5))
+                )
+                proposals_per_image.gt_boxes = gt_boxes
+
+            num_bg_samples.append((gt_classes == self.num_classes).sum().item())
+            num_fg_samples.append(gt_classes.numel() - num_bg_samples[-1])
+            proposals_with_gt.append(proposals_per_image)
+
+        # Log the number of fg/bg samples that are selected for training ROI heads
+        storage = get_event_storage()
+        storage.put_scalar("roi_head/num_fg_samples", np.mean(num_fg_samples))
+        storage.put_scalar("roi_head/num_bg_samples", np.mean(num_bg_samples))
+
+        return proposals_with_gt
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/sampling.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/sampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..ecf251a2fa301d9e31eee7d3ba5dc6eaab1732f8
--- /dev/null
+++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/sampling.py
@@ -0,0 +1,50 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+import torch
+
+__all__ = ["subsample_labels"]
+
+
+def subsample_labels(labels, num_samples, positive_fraction, bg_label):
+    """
+    Return `num_samples` (or fewer, if not enough found)
+    random samples from `labels` which is a mixture of positives & negatives.
+    It will try to return as many positives as possible without
+    exceeding `positive_fraction * num_samples`, and then try to
+    fill the remaining slots with negatives.
+
+    Args:
+        labels (Tensor): (N, ) label vector with values:
+            * -1: ignore
+            * bg_label: background ("negative") class
+            * otherwise: one or more foreground ("positive") classes
+        num_samples (int): The total number of labels with value >= 0 to return.
+            Values that are not sampled will be filled with -1 (ignore).
+        positive_fraction (float): The number of subsampled labels with values > 0
+            is `min(num_positives, int(positive_fraction * num_samples))`. The number
+            of negatives sampled is `min(num_negatives, num_samples - num_positives_sampled)`.
+            In other words, if there are not enough positives, the sample is filled with
+            negatives. If there are also not enough negatives, then as many elements are
+            sampled as possible.
+ bg_label (int): label index of background ("negative") class. + + Returns: + pos_idx, neg_idx (Tensor): + 1D vector of indices. The total length of both is `num_samples` or fewer. + """ + positive = torch.nonzero((labels != -1) & (labels != bg_label), as_tuple=True)[0] + negative = torch.nonzero(labels == bg_label, as_tuple=True)[0] + + num_pos = int(num_samples * positive_fraction) + # protect against not enough positive examples + num_pos = min(positive.numel(), num_pos) + num_neg = num_samples - num_pos + # protect against not enough negative examples + num_neg = min(negative.numel(), num_neg) + + # randomly select positive and negative examples + perm1 = torch.randperm(positive.numel(), device=positive.device)[:num_pos] + perm2 = torch.randperm(negative.numel(), device=negative.device)[:num_neg] + + pos_idx = positive[perm1] + neg_idx = negative[perm2] + return pos_idx, neg_idx diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/test_time_augmentation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/test_time_augmentation.py new file mode 100644 index 0000000000000000000000000000000000000000..1e5bcf02f655956f76eb78fb7de36d691de6a53c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/modeling/test_time_augmentation.py @@ -0,0 +1,285 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import numpy as np +from contextlib import contextmanager +from itertools import count +import torch +from torch import nn +from torch.nn.parallel import DistributedDataParallel + +from detectron2.data.detection_utils import read_image +from detectron2.data.transforms import ResizeShortestEdge +from detectron2.structures import Instances + +from .meta_arch import GeneralizedRCNN +from .postprocessing import detector_postprocess +from .roi_heads.fast_rcnn import fast_rcnn_inference_single_image + +__all__ = ["DatasetMapperTTA", "GeneralizedRCNNWithTTA"] + + +class DatasetMapperTTA: + """ + Implement test-time augmentation for detection data. + It is a callable which takes a dataset dict from a detection dataset, + and returns a list of dataset dicts where the images + are augmented from the input image by the transformations defined in the config. + This is used for test-time augmentation. + """ + + def __init__(self, cfg): + self.min_sizes = cfg.TEST.AUG.MIN_SIZES + self.max_size = cfg.TEST.AUG.MAX_SIZE + self.flip = cfg.TEST.AUG.FLIP + self.image_format = cfg.INPUT.FORMAT + + def __call__(self, dataset_dict): + """ + Args: + dict: a detection dataset dict + + Returns: + list[dict]: + a list of dataset dicts, which contain augmented version of the input image. + The total number of dicts is ``len(min_sizes) * (2 if flip else 1)``. 
+ """ + ret = [] + if "image" not in dataset_dict: + numpy_image = read_image(dataset_dict["file_name"], self.image_format) + else: + numpy_image = dataset_dict["image"].permute(1, 2, 0).numpy().astype("uint8") + for min_size in self.min_sizes: + image = np.copy(numpy_image) + tfm = ResizeShortestEdge(min_size, self.max_size).get_transform(image) + resized = tfm.apply_image(image) + resized = torch.as_tensor(resized.transpose(2, 0, 1).astype("float32")) + + dic = copy.deepcopy(dataset_dict) + dic["horiz_flip"] = False + dic["image"] = resized + ret.append(dic) + + if self.flip: + dic = copy.deepcopy(dataset_dict) + dic["horiz_flip"] = True + dic["image"] = torch.flip(resized, dims=[2]) + ret.append(dic) + return ret + + +class GeneralizedRCNNWithTTA(nn.Module): + """ + A GeneralizedRCNN with test-time augmentation enabled. + Its :meth:`__call__` method has the same interface as :meth:`GeneralizedRCNN.forward`. + """ + + def __init__(self, cfg, model, tta_mapper=None, batch_size=3): + """ + Args: + cfg (CfgNode): + model (GeneralizedRCNN): a GeneralizedRCNN to apply TTA on. + tta_mapper (callable): takes a dataset dict and returns a list of + augmented versions of the dataset dict. Defaults to + `DatasetMapperTTA(cfg)`. + batch_size (int): batch the augmented images into this batch size for inference. + """ + super().__init__() + if isinstance(model, DistributedDataParallel): + model = model.module + assert isinstance( + model, GeneralizedRCNN + ), "TTA is only supported on GeneralizedRCNN. Got a model of type {}".format(type(model)) + self.cfg = cfg.clone() + assert not self.cfg.MODEL.KEYPOINT_ON, "TTA for keypoint is not supported yet" + assert ( + not self.cfg.MODEL.LOAD_PROPOSALS + ), "TTA for pre-computed proposals is not supported yet" + + self.model = model + + if tta_mapper is None: + tta_mapper = DatasetMapperTTA(cfg) + self.tta_mapper = tta_mapper + self.batch_size = batch_size + + @contextmanager + def _turn_off_roi_heads(self, attrs): + """ + Open a context where some heads in `model.roi_heads` are temporarily turned off. + Args: + attr (list[str]): the attribute in `model.roi_heads` which can be used + to turn off a specific head, e.g., "mask_on", "keypoint_on". + """ + roi_heads = self.model.roi_heads + old = {} + for attr in attrs: + try: + old[attr] = getattr(roi_heads, attr) + except AttributeError: + # The head may not be implemented in certain ROIHeads + pass + + if len(old.keys()) == 0: + yield + else: + for attr in old.keys(): + setattr(roi_heads, attr, False) + yield + for attr in old.keys(): + setattr(roi_heads, attr, old[attr]) + + def _batch_inference(self, batched_inputs, detected_instances=None, do_postprocess=True): + """ + Execute inference on a list of inputs, + using batch size = self.batch_size, instead of the length of the list. 
+ + Inputs & outputs have the same format as :meth:`GeneralizedRCNN.inference` + """ + if detected_instances is None: + detected_instances = [None] * len(batched_inputs) + + outputs = [] + inputs, instances = [], [] + for idx, input, instance in zip(count(), batched_inputs, detected_instances): + inputs.append(input) + instances.append(instance) + if len(inputs) == self.batch_size or idx == len(batched_inputs) - 1: + outputs.extend( + self.model.inference( + inputs, + instances if instances[0] is not None else None, + do_postprocess=do_postprocess, + ) + ) + inputs, instances = [], [] + return outputs + + def __call__(self, batched_inputs): + """ + Same input/output format as :meth:`GeneralizedRCNN.forward` + """ + return [self._inference_one_image(x) for x in batched_inputs] + + def _detector_postprocess(self, outputs, aug_vars): + return detector_postprocess(outputs, aug_vars["height"], aug_vars["width"]) + + def _inference_one_image(self, input): + """ + Args: + input (dict): one dataset dict + + Returns: + dict: one output dict + """ + + augmented_inputs, aug_vars = self._get_augmented_inputs(input) + # Detect boxes from all augmented versions + with self._turn_off_roi_heads(["mask_on", "keypoint_on"]): + # temporarily disable roi heads + all_boxes, all_scores, all_classes = self._get_augmented_boxes( + augmented_inputs, aug_vars + ) + merged_instances = self._merge_detections( + all_boxes, all_scores, all_classes, (aug_vars["height"], aug_vars["width"]) + ) + + if self.cfg.MODEL.MASK_ON: + # Use the detected boxes to obtain new fields + augmented_instances = self._rescale_detected_boxes( + augmented_inputs, merged_instances, aug_vars + ) + # run forward on the detected boxes + outputs = self._batch_inference( + augmented_inputs, augmented_instances, do_postprocess=False + ) + # Delete now useless variables to avoid being out of memory + del augmented_inputs, augmented_instances, merged_instances + # average the predictions + outputs[0].pred_masks = self._reduce_pred_masks(outputs, aug_vars) + # postprocess + output = self._detector_postprocess(outputs[0], aug_vars) + return {"instances": output} + else: + return {"instances": merged_instances} + + def _get_augmented_inputs(self, input): + augmented_inputs = self.tta_mapper(input) + + do_hflip = [k.pop("horiz_flip", False) for k in augmented_inputs] + heights = [k["height"] for k in augmented_inputs] + widths = [k["width"] for k in augmented_inputs] + assert ( + len(set(heights)) == 1 and len(set(widths)) == 1 + ), "Augmented version of the inputs should have the same original resolution!" 
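+        # All augmented copies carry the same "height"/"width", i.e. the original image
+        # resolution; detections from each copy are later mapped back to this resolution
+        # by _detector_postprocess (via aug_vars) before they are merged.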
+ height = heights[0] + width = widths[0] + aug_vars = {"height": height, "width": width, "do_hflip": do_hflip} + + return augmented_inputs, aug_vars + + def _get_augmented_boxes(self, augmented_inputs, aug_vars): + # 1: forward with all augmented images + outputs = self._batch_inference(augmented_inputs, do_postprocess=False) + # 2: union the results + all_boxes = [] + all_scores = [] + all_classes = [] + for idx, output in enumerate(outputs): + rescaled_output = self._detector_postprocess(output, aug_vars) + pred_boxes = rescaled_output.pred_boxes.tensor + if aug_vars["do_hflip"][idx]: + pred_boxes[:, [0, 2]] = aug_vars["width"] - pred_boxes[:, [2, 0]] + all_boxes.append(pred_boxes) + all_scores.extend(rescaled_output.scores) + all_classes.extend(rescaled_output.pred_classes) + all_boxes = torch.cat(all_boxes, dim=0).cpu() + return all_boxes, all_scores, all_classes + + def _merge_detections(self, all_boxes, all_scores, all_classes, shape_hw): + # select from the union of all results + num_boxes = len(all_boxes) + num_classes = self.cfg.MODEL.ROI_HEADS.NUM_CLASSES + # +1 because fast_rcnn_inference expects background scores as well + all_scores_2d = torch.zeros(num_boxes, num_classes + 1, device=all_boxes.device) + for idx, cls, score in zip(count(), all_classes, all_scores): + all_scores_2d[idx, cls] = score + + merged_instances, _ = fast_rcnn_inference_single_image( + all_boxes, + all_scores_2d, + shape_hw, + 1e-8, + self.cfg.MODEL.ROI_HEADS.NMS_THRESH_TEST, + self.cfg.TEST.DETECTIONS_PER_IMAGE, + ) + + return merged_instances + + def _rescale_detected_boxes(self, augmented_inputs, merged_instances, aug_vars): + augmented_instances = [] + for idx, input in enumerate(augmented_inputs): + actual_height, actual_width = input["image"].shape[1:3] + scale_x = actual_width * 1.0 / aug_vars["width"] + scale_y = actual_height * 1.0 / aug_vars["height"] + pred_boxes = merged_instances.pred_boxes.clone() + pred_boxes.tensor[:, 0::2] *= scale_x + pred_boxes.tensor[:, 1::2] *= scale_y + if aug_vars["do_hflip"][idx]: + pred_boxes.tensor[:, [0, 2]] = actual_width - pred_boxes.tensor[:, [2, 0]] + + aug_instances = Instances( + image_size=(actual_height, actual_width), + pred_boxes=pred_boxes, + pred_classes=merged_instances.pred_classes, + scores=merged_instances.scores, + ) + augmented_instances.append(aug_instances) + return augmented_instances + + def _reduce_pred_masks(self, outputs, aug_vars): + for idx, output in enumerate(outputs): + if aug_vars["do_hflip"][idx]: + output.pred_masks = output.pred_masks.flip(dims=[3]) + all_pred_masks = torch.stack([o.pred_masks for o in outputs], dim=0) + avg_pred_masks = torch.mean(all_pred_masks, dim=0) + return avg_pred_masks diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..10f84e12d029a07d5c7d3ac29e18b572a92ef03c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from .build import build_lr_scheduler, build_optimizer +from .lr_scheduler import WarmupCosineLR, WarmupMultiStepLR + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/build.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/build.py new file mode 100644 index 0000000000000000000000000000000000000000..6d9d0ee5df1a6135c1a3df0151dfe0e36aa9971a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/build.py @@ -0,0 +1,165 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from enum import Enum +from typing import Any, Callable, Dict, Iterable, List, Set, Type, Union +import torch + +from detectron2.config import CfgNode + +from .lr_scheduler import WarmupCosineLR, WarmupMultiStepLR + +_GradientClipperInput = Union[torch.Tensor, Iterable[torch.Tensor]] +_GradientClipper = Callable[[_GradientClipperInput], None] + + +class GradientClipType(Enum): + VALUE = "value" + NORM = "norm" + + +def _create_gradient_clipper(cfg: CfgNode) -> _GradientClipper: + """ + Creates gradient clipping closure to clip by value or by norm, + according to the provided config. + """ + cfg = cfg.clone() + + def clip_grad_norm(p: _GradientClipperInput): + torch.nn.utils.clip_grad_norm_(p, cfg.CLIP_VALUE, cfg.NORM_TYPE) + + def clip_grad_value(p: _GradientClipperInput): + torch.nn.utils.clip_grad_value_(p, cfg.CLIP_VALUE) + + _GRADIENT_CLIP_TYPE_TO_CLIPPER = { + GradientClipType.VALUE: clip_grad_value, + GradientClipType.NORM: clip_grad_norm, + } + return _GRADIENT_CLIP_TYPE_TO_CLIPPER[GradientClipType(cfg.CLIP_TYPE)] + + +def _generate_optimizer_class_with_gradient_clipping( + optimizer_type: Type[torch.optim.Optimizer], gradient_clipper: _GradientClipper +) -> Type[torch.optim.Optimizer]: + """ + Dynamically creates a new type that inherits the type of a given instance + and overrides the `step` method to add gradient clipping + """ + + def optimizer_wgc_step(self, closure=None): + for group in self.param_groups: + for p in group["params"]: + gradient_clipper(p) + super(type(self), self).step(closure) + + OptimizerWithGradientClip = type( + optimizer_type.__name__ + "WithGradientClip", + (optimizer_type,), + {"step": optimizer_wgc_step}, + ) + return OptimizerWithGradientClip + + +def maybe_add_gradient_clipping( + cfg: CfgNode, optimizer: torch.optim.Optimizer +) -> torch.optim.Optimizer: + """ + If gradient clipping is enabled through config options, wraps the existing + optimizer instance of some type OptimizerType to become an instance + of the new dynamically created class OptimizerTypeWithGradientClip + that inherits OptimizerType and overrides the `step` method to + include gradient clipping. 
+ + Args: + cfg: CfgNode + configuration options + optimizer: torch.optim.Optimizer + existing optimizer instance + + Return: + optimizer: torch.optim.Optimizer + either the unmodified optimizer instance (if gradient clipping is + disabled), or the same instance with adjusted __class__ to override + the `step` method and include gradient clipping + """ + if not cfg.SOLVER.CLIP_GRADIENTS.ENABLED: + return optimizer + grad_clipper = _create_gradient_clipper(cfg.SOLVER.CLIP_GRADIENTS) + OptimizerWithGradientClip = _generate_optimizer_class_with_gradient_clipping( + type(optimizer), grad_clipper + ) + optimizer.__class__ = OptimizerWithGradientClip + return optimizer + + +def build_optimizer(cfg: CfgNode, model: torch.nn.Module) -> torch.optim.Optimizer: + """ + Build an optimizer from config. + """ + norm_module_types = ( + torch.nn.BatchNorm1d, + torch.nn.BatchNorm2d, + torch.nn.BatchNorm3d, + torch.nn.SyncBatchNorm, + # NaiveSyncBatchNorm inherits from BatchNorm2d + torch.nn.GroupNorm, + torch.nn.InstanceNorm1d, + torch.nn.InstanceNorm2d, + torch.nn.InstanceNorm3d, + torch.nn.LayerNorm, + torch.nn.LocalResponseNorm, + ) + params: List[Dict[str, Any]] = [] + memo: Set[torch.nn.parameter.Parameter] = set() + for module in model.modules(): + for key, value in module.named_parameters(recurse=False): + if not value.requires_grad: + continue + # Avoid duplicating parameters + if value in memo: + continue + memo.add(value) + lr = cfg.SOLVER.BASE_LR + weight_decay = cfg.SOLVER.WEIGHT_DECAY + if isinstance(module, norm_module_types): + weight_decay = cfg.SOLVER.WEIGHT_DECAY_NORM + elif key == "bias": + # NOTE: unlike Detectron v1, we now default BIAS_LR_FACTOR to 1.0 + # and WEIGHT_DECAY_BIAS to WEIGHT_DECAY so that bias optimizer + # hyperparameters are by default exactly the same as for regular + # weights. + lr = cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR + weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS + params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}] + + optimizer = torch.optim.SGD( + params, cfg.SOLVER.BASE_LR, momentum=cfg.SOLVER.MOMENTUM, nesterov=cfg.SOLVER.NESTEROV + ) + optimizer = maybe_add_gradient_clipping(cfg, optimizer) + return optimizer + + +def build_lr_scheduler( + cfg: CfgNode, optimizer: torch.optim.Optimizer +) -> torch.optim.lr_scheduler._LRScheduler: + """ + Build a LR scheduler from config. + """ + name = cfg.SOLVER.LR_SCHEDULER_NAME + if name == "WarmupMultiStepLR": + return WarmupMultiStepLR( + optimizer, + cfg.SOLVER.STEPS, + cfg.SOLVER.GAMMA, + warmup_factor=cfg.SOLVER.WARMUP_FACTOR, + warmup_iters=cfg.SOLVER.WARMUP_ITERS, + warmup_method=cfg.SOLVER.WARMUP_METHOD, + ) + elif name == "WarmupCosineLR": + return WarmupCosineLR( + optimizer, + cfg.SOLVER.MAX_ITER, + warmup_factor=cfg.SOLVER.WARMUP_FACTOR, + warmup_iters=cfg.SOLVER.WARMUP_ITERS, + warmup_method=cfg.SOLVER.WARMUP_METHOD, + ) + else: + raise ValueError("Unknown LR scheduler: {}".format(name)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/lr_scheduler.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/lr_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..6148d86785dae03ed2611792fb28da387d1103b8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/solver/lr_scheduler.py @@ -0,0 +1,116 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import math +from bisect import bisect_right +from typing import List +import torch + +# NOTE: PyTorch's LR scheduler interface uses names that assume the LR changes +# only on epoch boundaries. We typically use iteration based schedules instead. +# As a result, "epoch" (e.g., as in self.last_epoch) should be understood to mean +# "iteration" instead. + +# FIXME: ideally this would be achieved with a CombinedLRScheduler, separating +# MultiStepLR with WarmupLR but the current LRScheduler design doesn't allow it. + + +class WarmupMultiStepLR(torch.optim.lr_scheduler._LRScheduler): + def __init__( + self, + optimizer: torch.optim.Optimizer, + milestones: List[int], + gamma: float = 0.1, + warmup_factor: float = 0.001, + warmup_iters: int = 1000, + warmup_method: str = "linear", + last_epoch: int = -1, + ): + if not list(milestones) == sorted(milestones): + raise ValueError( + "Milestones should be a list of" " increasing integers. Got {}", milestones + ) + self.milestones = milestones + self.gamma = gamma + self.warmup_factor = warmup_factor + self.warmup_iters = warmup_iters + self.warmup_method = warmup_method + super().__init__(optimizer, last_epoch) + + def get_lr(self) -> List[float]: + warmup_factor = _get_warmup_factor_at_iter( + self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor + ) + return [ + base_lr * warmup_factor * self.gamma ** bisect_right(self.milestones, self.last_epoch) + for base_lr in self.base_lrs + ] + + def _compute_values(self) -> List[float]: + # The new interface + return self.get_lr() + + +class WarmupCosineLR(torch.optim.lr_scheduler._LRScheduler): + def __init__( + self, + optimizer: torch.optim.Optimizer, + max_iters: int, + warmup_factor: float = 0.001, + warmup_iters: int = 1000, + warmup_method: str = "linear", + last_epoch: int = -1, + ): + self.max_iters = max_iters + self.warmup_factor = warmup_factor + self.warmup_iters = warmup_iters + self.warmup_method = warmup_method + super().__init__(optimizer, last_epoch) + + def get_lr(self) -> List[float]: + warmup_factor = _get_warmup_factor_at_iter( + self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor + ) + # Different definitions of half-cosine with warmup are possible. For + # simplicity we multiply the standard half-cosine schedule by the warmup + # factor. An alternative is to start the period of the cosine at warmup_iters + # instead of at 0. In the case that warmup_iters << max_iters the two are + # very close to each other. + return [ + base_lr + * warmup_factor + * 0.5 + * (1.0 + math.cos(math.pi * self.last_epoch / self.max_iters)) + for base_lr in self.base_lrs + ] + + def _compute_values(self) -> List[float]: + # The new interface + return self.get_lr() + + +def _get_warmup_factor_at_iter( + method: str, iter: int, warmup_iters: int, warmup_factor: float +) -> float: + """ + Return the learning rate warmup factor at a specific iteration. + See :paper:`in1k1h` for more details. + + Args: + method (str): warmup method; either "constant" or "linear". + iter (int): iteration at which to calculate the warmup factor. + warmup_iters (int): the number of warmup iterations. + warmup_factor (float): the base warmup factor (the meaning changes according + to the method used). + + Returns: + float: the effective warmup factor at the given iteration. 
+ """ + if iter >= warmup_iters: + return 1.0 + + if method == "constant": + return warmup_factor + elif method == "linear": + alpha = iter / warmup_iters + return warmup_factor * (1 - alpha) + alpha + else: + raise ValueError("Unknown warmup method: {}".format(method)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..618f526753b5813b86645023271b67b421ea4cb5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .boxes import Boxes, BoxMode, pairwise_iou +from .image_list import ImageList + +from .instances import Instances +from .keypoints import Keypoints, heatmaps_to_keypoints +from .masks import BitMasks, PolygonMasks, rasterize_polygons_within_box, polygons_to_bitmask +from .rotated_boxes import RotatedBoxes +from .rotated_boxes import pairwise_iou as pairwise_iou_rotated + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/boxes.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/boxes.py new file mode 100644 index 0000000000000000000000000000000000000000..e625803e23ec6c0f71ada847ba7bef8e15c8fa40 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/boxes.py @@ -0,0 +1,367 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import math +import numpy as np +from enum import IntEnum, unique +from typing import Iterator, List, Tuple, Union +import torch + +_RawBoxType = Union[List[float], Tuple[float, ...], torch.Tensor, np.ndarray] + + +@unique +class BoxMode(IntEnum): + """ + Enum of different ways to represent a box. + """ + + XYXY_ABS = 0 + """ + (x0, y0, x1, y1) in absolute floating points coordinates. + The coordinates in range [0, width or height]. + """ + XYWH_ABS = 1 + """ + (x0, y0, w, h) in absolute floating points coordinates. + """ + XYXY_REL = 2 + """ + Not yet supported! + (x0, y0, x1, y1) in range [0, 1]. They are relative to the size of the image. + """ + XYWH_REL = 3 + """ + Not yet supported! + (x0, y0, w, h) in range [0, 1]. They are relative to the size of the image. + """ + XYWHA_ABS = 4 + """ + (xc, yc, w, h, a) in absolute floating points coordinates. + (xc, yc) is the center of the rotated box, and the angle a is in degrees ccw. + """ + + @staticmethod + def convert(box: _RawBoxType, from_mode: "BoxMode", to_mode: "BoxMode") -> _RawBoxType: + """ + Args: + box: can be a k-tuple, k-list or an Nxk array/tensor, where k = 4 or 5 + from_mode, to_mode (BoxMode) + + Returns: + The converted box of the same type. 
+ """ + if from_mode == to_mode: + return box + + original_type = type(box) + is_numpy = isinstance(box, np.ndarray) + single_box = isinstance(box, (list, tuple)) + if single_box: + assert len(box) == 4 or len(box) == 5, ( + "BoxMode.convert takes either a k-tuple/list or an Nxk array/tensor," + " where k == 4 or 5" + ) + arr = torch.tensor(box)[None, :] + else: + # avoid modifying the input box + if is_numpy: + arr = torch.from_numpy(np.asarray(box)).clone() + else: + arr = box.clone() + + assert to_mode.value not in [ + BoxMode.XYXY_REL, + BoxMode.XYWH_REL, + ] and from_mode.value not in [ + BoxMode.XYXY_REL, + BoxMode.XYWH_REL, + ], "Relative mode not yet supported!" + + if from_mode == BoxMode.XYWHA_ABS and to_mode == BoxMode.XYXY_ABS: + assert ( + arr.shape[-1] == 5 + ), "The last dimension of input shape must be 5 for XYWHA format" + original_dtype = arr.dtype + arr = arr.double() + + w = arr[:, 2] + h = arr[:, 3] + a = arr[:, 4] + c = torch.abs(torch.cos(a * math.pi / 180.0)) + s = torch.abs(torch.sin(a * math.pi / 180.0)) + # This basically computes the horizontal bounding rectangle of the rotated box + new_w = c * w + s * h + new_h = c * h + s * w + + # convert center to top-left corner + arr[:, 0] -= new_w / 2.0 + arr[:, 1] -= new_h / 2.0 + # bottom-right corner + arr[:, 2] = arr[:, 0] + new_w + arr[:, 3] = arr[:, 1] + new_h + + arr = arr[:, :4].to(dtype=original_dtype) + elif from_mode == BoxMode.XYWH_ABS and to_mode == BoxMode.XYWHA_ABS: + original_dtype = arr.dtype + arr = arr.double() + arr[:, 0] += arr[:, 2] / 2.0 + arr[:, 1] += arr[:, 3] / 2.0 + angles = torch.zeros((arr.shape[0], 1), dtype=arr.dtype) + arr = torch.cat((arr, angles), axis=1).to(dtype=original_dtype) + else: + if to_mode == BoxMode.XYXY_ABS and from_mode == BoxMode.XYWH_ABS: + arr[:, 2] += arr[:, 0] + arr[:, 3] += arr[:, 1] + elif from_mode == BoxMode.XYXY_ABS and to_mode == BoxMode.XYWH_ABS: + arr[:, 2] -= arr[:, 0] + arr[:, 3] -= arr[:, 1] + else: + raise NotImplementedError( + "Conversion from BoxMode {} to {} is not supported yet".format( + from_mode, to_mode + ) + ) + + if single_box: + return original_type(arr.flatten().tolist()) + if is_numpy: + return arr.numpy() + else: + return arr + + +class Boxes: + """ + This structure stores a list of boxes as a Nx4 torch.Tensor. + It supports some common methods about boxes + (`area`, `clip`, `nonempty`, etc), + and also behaves like a Tensor + (support indexing, `to(device)`, `.device`, and iteration over all boxes) + + Attributes: + tensor (torch.Tensor): float matrix of Nx4. Each row is (x1, y1, x2, y2). + """ + + BoxSizeType = Union[List[int], Tuple[int, int]] + + def __init__(self, tensor: torch.Tensor): + """ + Args: + tensor (Tensor[float]): a Nx4 matrix. Each row is (x1, y1, x2, y2). + """ + device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device("cpu") + tensor = torch.as_tensor(tensor, dtype=torch.float32, device=device) + if tensor.numel() == 0: + # Use reshape, so we don't end up creating a new tensor that does not depend on + # the inputs (and consequently confuses jit) + tensor = tensor.reshape((0, 4)).to(dtype=torch.float32, device=device) + assert tensor.dim() == 2 and tensor.size(-1) == 4, tensor.size() + + self.tensor = tensor + + def clone(self) -> "Boxes": + """ + Clone the Boxes. + + Returns: + Boxes + """ + return Boxes(self.tensor.clone()) + + def to(self, device: str) -> "Boxes": + return Boxes(self.tensor.to(device)) + + def area(self) -> torch.Tensor: + """ + Computes the area of all the boxes. 
+ + Returns: + torch.Tensor: a vector with areas of each box. + """ + box = self.tensor + area = (box[:, 2] - box[:, 0]) * (box[:, 3] - box[:, 1]) + return area + + def clip(self, box_size: BoxSizeType) -> None: + """ + Clip (in place) the boxes by limiting x coordinates to the range [0, width] + and y coordinates to the range [0, height]. + + Args: + box_size (height, width): The clipping box's size. + """ + assert torch.isfinite(self.tensor).all(), "Box tensor contains infinite or NaN!" + h, w = box_size + self.tensor[:, 0].clamp_(min=0, max=w) + self.tensor[:, 1].clamp_(min=0, max=h) + self.tensor[:, 2].clamp_(min=0, max=w) + self.tensor[:, 3].clamp_(min=0, max=h) + + def nonempty(self, threshold: float = 0.0) -> torch.Tensor: + """ + Find boxes that are non-empty. + A box is considered empty, if either of its side is no larger than threshold. + + Returns: + Tensor: + a binary vector which represents whether each box is empty + (False) or non-empty (True). + """ + box = self.tensor + widths = box[:, 2] - box[:, 0] + heights = box[:, 3] - box[:, 1] + keep = (widths > threshold) & (heights > threshold) + return keep + + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "Boxes": + """ + Returns: + Boxes: Create a new :class:`Boxes` by indexing. + + The following usage are allowed: + + 1. `new_boxes = boxes[3]`: return a `Boxes` which contains only one box. + 2. `new_boxes = boxes[2:10]`: return a slice of boxes. + 3. `new_boxes = boxes[vector]`, where vector is a torch.BoolTensor + with `length = len(boxes)`. Nonzero elements in the vector will be selected. + + Note that the returned Boxes might share storage with this Boxes, + subject to Pytorch's indexing semantics. + """ + if isinstance(item, int): + return Boxes(self.tensor[item].view(1, -1)) + b = self.tensor[item] + assert b.dim() == 2, "Indexing on Boxes with {} failed to return a matrix!".format(item) + return Boxes(b) + + def __len__(self) -> int: + return self.tensor.shape[0] + + def __repr__(self) -> str: + return "Boxes(" + str(self.tensor) + ")" + + def inside_box(self, box_size: BoxSizeType, boundary_threshold: int = 0) -> torch.Tensor: + """ + Args: + box_size (height, width): Size of the reference box. + boundary_threshold (int): Boxes that extend beyond the reference box + boundary by more than boundary_threshold are considered "outside". + + Returns: + a binary vector, indicating whether each box is inside the reference box. + """ + height, width = box_size + inds_inside = ( + (self.tensor[..., 0] >= -boundary_threshold) + & (self.tensor[..., 1] >= -boundary_threshold) + & (self.tensor[..., 2] < width + boundary_threshold) + & (self.tensor[..., 3] < height + boundary_threshold) + ) + return inds_inside + + def get_centers(self) -> torch.Tensor: + """ + Returns: + The box centers in a Nx2 array of (x, y). + """ + return (self.tensor[:, :2] + self.tensor[:, 2:]) / 2 + + def scale(self, scale_x: float, scale_y: float) -> None: + """ + Scale the box with horizontal and vertical scaling factors + """ + self.tensor[:, 0::2] *= scale_x + self.tensor[:, 1::2] *= scale_y + + @classmethod + def cat(cls, boxes_list: List["Boxes"]) -> "Boxes": + """ + Concatenates a list of Boxes into a single Boxes + + Arguments: + boxes_list (list[Boxes]) + + Returns: + Boxes: the concatenated Boxes + """ + assert isinstance(boxes_list, (list, tuple)) + if len(boxes_list) == 0: + return cls(torch.empty(0)) + assert all(isinstance(box, Boxes) for box in boxes_list) + + # use torch.cat (v.s. 
layers.cat) so the returned boxes never share storage with input
+        cat_boxes = cls(torch.cat([b.tensor for b in boxes_list], dim=0))
+        return cat_boxes
+
+    @property
+    def device(self) -> torch.device:
+        return self.tensor.device
+
+    def __iter__(self) -> Iterator[torch.Tensor]:
+        """
+        Yield a box as a Tensor of shape (4,) at a time.
+        """
+        yield from self.tensor
+
+
+# implementation from https://github.com/kuangliu/torchcv/blob/master/torchcv/utils/box.py
+# with slight modifications
+def pairwise_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
+    """
+    Given two lists of boxes of size N and M,
+    compute the IoU (intersection over union)
+    between __all__ N x M pairs of boxes.
+    The box order must be (xmin, ymin, xmax, ymax).
+
+    Args:
+        boxes1, boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.
+
+    Returns:
+        Tensor: IoU, sized [N,M].
+    """
+    area1 = boxes1.area()
+    area2 = boxes2.area()
+
+    boxes1, boxes2 = boxes1.tensor, boxes2.tensor
+
+    width_height = torch.min(boxes1[:, None, 2:], boxes2[:, 2:]) - torch.max(
+        boxes1[:, None, :2], boxes2[:, :2]
+    )  # [N,M,2]
+
+    width_height.clamp_(min=0)  # [N,M,2]
+    inter = width_height.prod(dim=2)  # [N,M]
+    del width_height
+
+    # handle empty boxes
+    iou = torch.where(
+        inter > 0,
+        inter / (area1[:, None] + area2 - inter),
+        torch.zeros(1, dtype=inter.dtype, device=inter.device),
+    )
+    return iou
+
+
+def matched_boxlist_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
+    """
+    Compute pairwise intersection over union (IOU) of two sets of matched
+    boxes. The box order must be (xmin, ymin, xmax, ymax).
+    Similar to boxlist_iou, but computes only diagonal elements of the matrix.
+    Arguments:
+        boxes1: (Boxes) bounding boxes, sized [N,4].
+        boxes2: (Boxes) bounding boxes, sized [N,4].
+    Returns:
+        (tensor) iou, sized [N].
+    """
+    assert len(boxes1) == len(
+        boxes2
+    ), "boxlists should have the same number of entries, got {}, {}".format(
+        len(boxes1), len(boxes2)
+    )
+    area1 = boxes1.area()  # [N]
+    area2 = boxes2.area()  # [N]
+    box1, box2 = boxes1.tensor, boxes2.tensor
+    lt = torch.max(box1[:, :2], box2[:, :2])  # [N,2]
+    rb = torch.min(box1[:, 2:], box2[:, 2:])  # [N,2]
+    wh = (rb - lt).clamp(min=0)  # [N,2]
+    inter = wh[:, 0] * wh[:, 1]  # [N]
+    iou = inter / (area1 + area2 - inter)  # [N]
+    return iou
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/image_list.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/image_list.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d89224b64402badf7f0b113188b5f653df912ac
--- /dev/null
+++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/image_list.py
@@ -0,0 +1,113 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
+from __future__ import division
+from typing import Any, List, Sequence, Tuple, Union
+import torch
+from torch.nn import functional as F
+
+
+class ImageList(object):
+    """
+    Structure that holds a list of images (of possibly
+    varying sizes) as a single tensor.
+    This works by padding the images to the same size,
+    and storing in a field the original sizes of each image
+
+    Attributes:
+        image_sizes (list[tuple[int, int]]): each tuple is (h, w)
+    """
+
+    def __init__(self, tensor: torch.Tensor, image_sizes: List[Tuple[int, int]]):
+        """
+        Arguments:
+            tensor (Tensor): of shape (N, H, W) or (N, C_1, ..., C_K, H, W) where K >= 1
+            image_sizes (list[tuple[int, int]]): Each tuple is (h, w).
It can + be smaller than (H, W) due to padding. + """ + self.tensor = tensor + self.image_sizes = image_sizes + + def __len__(self) -> int: + return len(self.image_sizes) + + def __getitem__(self, idx: Union[int, slice]) -> torch.Tensor: + """ + Access the individual image in its original size. + + Returns: + Tensor: an image of shape (H, W) or (C_1, ..., C_K, H, W) where K >= 1 + """ + size = self.image_sizes[idx] + return self.tensor[idx, ..., : size[0], : size[1]] # type: ignore + + def to(self, *args: Any, **kwargs: Any) -> "ImageList": + cast_tensor = self.tensor.to(*args, **kwargs) + return ImageList(cast_tensor, self.image_sizes) + + @property + def device(self) -> torch.device: + return self.tensor.device + + @staticmethod + def from_tensors( + tensors: Sequence[torch.Tensor], size_divisibility: int = 0, pad_value: float = 0.0 + ) -> "ImageList": + """ + Args: + tensors: a tuple or list of `torch.Tensors`, each of shape (Hi, Wi) or + (C_1, ..., C_K, Hi, Wi) where K >= 1. The Tensors will be padded + to the same shape with `pad_value`. + size_divisibility (int): If `size_divisibility > 0`, add padding to ensure + the common height and width is divisible by `size_divisibility`. + This depends on the model and many models need a divisibility of 32. + pad_value (float): value to pad + + Returns: + an `ImageList`. + """ + assert len(tensors) > 0 + assert isinstance(tensors, (tuple, list)) + for t in tensors: + assert isinstance(t, torch.Tensor), type(t) + assert t.shape[1:-2] == tensors[0].shape[1:-2], t.shape + # per dimension maximum (H, W) or (C_1, ..., C_K, H, W) where K >= 1 among all tensors + max_size = ( + # In tracing mode, x.shape[i] is Tensor, and should not be converted + # to int: this will cause the traced graph to have hard-coded shapes. + # Instead we should make max_size a Tensor that depends on these tensors. + # Using torch.stack twice seems to be the best way to convert + # list[list[ScalarTensor]] to a Tensor + torch.stack( + [ + torch.stack([torch.as_tensor(dim) for dim in size]) + for size in [tuple(img.shape) for img in tensors] + ] + ) + .max(0) + .values + ) + + if size_divisibility > 0: + stride = size_divisibility + # the last two dims are H,W, both subject to divisibility requirement + max_size = torch.cat([max_size[:-2], (max_size[-2:] + (stride - 1)) // stride * stride]) + + image_sizes = [tuple(im.shape[-2:]) for im in tensors] + + if len(tensors) == 1: + # This seems slightly (2%) faster. 
+ # TODO: check whether it's faster for multiple images as well + image_size = image_sizes[0] + padding_size = [0, max_size[-1] - image_size[1], 0, max_size[-2] - image_size[0]] + if all(x == 0 for x in padding_size): # https://github.com/pytorch/pytorch/issues/31734 + batched_imgs = tensors[0].unsqueeze(0) + else: + padded = F.pad(tensors[0], padding_size, value=pad_value) + batched_imgs = padded.unsqueeze_(0) + else: + # max_size can be a tensor in tracing mode, therefore use tuple() + batch_shape = (len(tensors),) + tuple(max_size) + batched_imgs = tensors[0].new_full(batch_shape, pad_value) + for img, pad_img in zip(tensors, batched_imgs): + pad_img[..., : img.shape[-2], : img.shape[-1]].copy_(img) + + return ImageList(batched_imgs.contiguous(), image_sizes) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/instances.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/instances.py new file mode 100644 index 0000000000000000000000000000000000000000..373de08c01517c0f78b14d94da7ff702daaf375d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/instances.py @@ -0,0 +1,185 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import itertools +from typing import Any, Dict, List, Tuple, Union +import torch + + +class Instances: + """ + This class represents a list of instances in an image. + It stores the attributes of instances (e.g., boxes, masks, labels, scores) as "fields". + All fields must have the same ``__len__`` which is the number of instances. + + All other (non-field) attributes of this class are considered private: + they must start with '_' and are not modifiable by a user. + + Some basic usage: + + 1. Set/Get a field: + + .. code-block:: python + + instances.gt_boxes = Boxes(...) + print(instances.pred_masks) # a tensor of shape (N, H, W) + print('gt_masks' in instances) + + 2. ``len(instances)`` returns the number of instances + 3. Indexing: ``instances[indices]`` will apply the indexing on all the fields + and returns a new :class:`Instances`. + Typically, ``indices`` is a integer vector of indices, + or a binary mask of length ``num_instances``, + """ + + def __init__(self, image_size: Tuple[int, int], **kwargs: Any): + """ + Args: + image_size (height, width): the spatial size of the image. + kwargs: fields to add to this `Instances`. + """ + self._image_size = image_size + self._fields: Dict[str, Any] = {} + for k, v in kwargs.items(): + self.set(k, v) + + @property + def image_size(self) -> Tuple[int, int]: + """ + Returns: + tuple: height, width + """ + return self._image_size + + def __setattr__(self, name: str, val: Any) -> None: + if name.startswith("_"): + super().__setattr__(name, val) + else: + self.set(name, val) + + def __getattr__(self, name: str) -> Any: + if name == "_fields" or name not in self._fields: + raise AttributeError("Cannot find field '{}' in the given Instances!".format(name)) + return self._fields[name] + + def set(self, name: str, value: Any) -> None: + """ + Set the field named `name` to `value`. + The length of `value` must be the number of instances, + and must agree with other existing fields in this object. 
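# Editorial sketch (not part of this diff): minimal use of the `Instances` container
# defined above. `Boxes` and `Instances` are imported from detectron2.structures, the
# same package import used by analysis.py further below; field names such as
# `pred_boxes` are arbitrary examples.
import torch
from detectron2.structures import Boxes, Instances

inst = Instances((480, 640))                  # image (height, width)
inst.pred_boxes = Boxes(torch.rand(3, 4))     # any field name works; lengths must agree
inst.scores = torch.rand(3)
assert len(inst) == 3 and inst.has("scores")
confident = inst[inst.scores > 0.5]           # indexing is applied to every field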
+ """ + data_len = len(value) + if len(self._fields): + assert ( + len(self) == data_len + ), "Adding a field of length {} to a Instances of length {}".format(data_len, len(self)) + self._fields[name] = value + + def has(self, name: str) -> bool: + """ + Returns: + bool: whether the field called `name` exists. + """ + return name in self._fields + + def remove(self, name: str) -> None: + """ + Remove the field called `name`. + """ + del self._fields[name] + + def get(self, name: str) -> Any: + """ + Returns the field called `name`. + """ + return self._fields[name] + + def get_fields(self) -> Dict[str, Any]: + """ + Returns: + dict: a dict which maps names (str) to data of the fields + + Modifying the returned dict will modify this instance. + """ + return self._fields + + # Tensor-like methods + def to(self, device: str) -> "Instances": + """ + Returns: + Instances: all fields are called with a `to(device)`, if the field has this method. + """ + ret = Instances(self._image_size) + for k, v in self._fields.items(): + if hasattr(v, "to"): + v = v.to(device) + ret.set(k, v) + return ret + + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "Instances": + """ + Args: + item: an index-like object and will be used to index all the fields. + + Returns: + If `item` is a string, return the data in the corresponding field. + Otherwise, returns an `Instances` where all fields are indexed by `item`. + """ + if type(item) == int: + if item >= len(self) or item < -len(self): + raise IndexError("Instances index out of range!") + else: + item = slice(item, None, len(self)) + + ret = Instances(self._image_size) + for k, v in self._fields.items(): + ret.set(k, v[item]) + return ret + + def __len__(self) -> int: + for v in self._fields.values(): + return len(v) + raise NotImplementedError("Empty Instances does not support __len__!") + + def __iter__(self): + raise NotImplementedError("`Instances` object is not iterable!") + + @staticmethod + def cat(instance_lists: List["Instances"]) -> "Instances": + """ + Args: + instance_lists (list[Instances]) + + Returns: + Instances + """ + assert all(isinstance(i, Instances) for i in instance_lists) + assert len(instance_lists) > 0 + if len(instance_lists) == 1: + return instance_lists[0] + + image_size = instance_lists[0].image_size + for i in instance_lists[1:]: + assert i.image_size == image_size + ret = Instances(image_size) + for k in instance_lists[0]._fields.keys(): + values = [i.get(k) for i in instance_lists] + v0 = values[0] + if isinstance(v0, torch.Tensor): + values = torch.cat(values, dim=0) + elif isinstance(v0, list): + values = list(itertools.chain(*values)) + elif hasattr(type(v0), "cat"): + values = type(v0).cat(values) + else: + raise ValueError("Unsupported type {} for concatenation".format(type(v0))) + ret.set(k, values) + return ret + + def __str__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={}, ".format(len(self)) + s += "image_height={}, ".format(self._image_size[0]) + s += "image_width={}, ".format(self._image_size[1]) + s += "fields=[{}])".format(", ".join((f"{k}: {v}" for k, v in self._fields.items()))) + return s + + __repr__ = __str__ diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/keypoints.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/keypoints.py new file mode 100644 index 0000000000000000000000000000000000000000..2242815f31dfe88aaabbf4b49f724c999a71912d --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/keypoints.py @@ -0,0 +1,209 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +from typing import Any, List, Tuple, Union +import torch + +from detectron2.layers import interpolate + + +class Keypoints: + """ + Stores keypoint annotation data. GT Instances have a `gt_keypoints` property + containing the x,y location and visibility flag of each keypoint. This tensor has shape + (N, K, 3) where N is the number of instances and K is the number of keypoints per instance. + + The visibility flag follows the COCO format and must be one of three integers: + * v=0: not labeled (in which case x=y=0) + * v=1: labeled but not visible + * v=2: labeled and visible + """ + + def __init__(self, keypoints: Union[torch.Tensor, np.ndarray, List[List[float]]]): + """ + Arguments: + keypoints: A Tensor, numpy array, or list of the x, y, and visibility of each keypoint. + The shape should be (N, K, 3) where N is the number of + instances, and K is the number of keypoints per instance. + """ + device = keypoints.device if isinstance(keypoints, torch.Tensor) else torch.device("cpu") + keypoints = torch.as_tensor(keypoints, dtype=torch.float32, device=device) + assert keypoints.dim() == 3 and keypoints.shape[2] == 3, keypoints.shape + self.tensor = keypoints + + def __len__(self) -> int: + return self.tensor.size(0) + + def to(self, *args: Any, **kwargs: Any) -> "Keypoints": + return type(self)(self.tensor.to(*args, **kwargs)) + + @property + def device(self) -> torch.device: + return self.tensor.device + + def to_heatmap(self, boxes: torch.Tensor, heatmap_size: int) -> torch.Tensor: + """ + Arguments: + boxes: Nx4 tensor, the boxes to draw the keypoints to + + Returns: + heatmaps: + A tensor of shape (N, K) containing an integer spatial label + in the range [0, heatmap_size**2 - 1] for each keypoint in the input. + valid: + A tensor of shape (N, K) containing whether each keypoint is in the roi or not. + """ + return _keypoints_to_heatmap(self.tensor, boxes, heatmap_size) + + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "Keypoints": + """ + Create a new `Keypoints` by indexing on this `Keypoints`. + + The following usage are allowed: + + 1. `new_kpts = kpts[3]`: return a `Keypoints` which contains only one instance. + 2. `new_kpts = kpts[2:10]`: return a slice of key points. + 3. `new_kpts = kpts[vector]`, where vector is a torch.ByteTensor + with `length = len(kpts)`. Nonzero elements in the vector will be selected. + + Note that the returned Keypoints might share storage with this Keypoints, + subject to Pytorch's indexing semantics. + """ + if isinstance(item, int): + return Keypoints([self.tensor[item]]) + return Keypoints(self.tensor[item]) + + def __repr__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={})".format(len(self.tensor)) + return s + + +# TODO make this nicer, this is a direct translation from C2 (but removing the inner loop) +def _keypoints_to_heatmap( + keypoints: torch.Tensor, rois: torch.Tensor, heatmap_size: int +) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Encode keypoint locations into a target heatmap for use in SoftmaxWithLoss across space. + + Maps keypoints from the half-open interval [x1, x2) on continuous image coordinates to the + closed interval [0, heatmap_size - 1] on discrete image coordinates. 
We use the + continuous-discrete conversion from Heckbert 1990 ("What is the coordinate of a pixel?"): + d = floor(c) and c = d + 0.5, where d is a discrete coordinate and c is a continuous coordinate. + + Arguments: + keypoints: tensor of keypoint locations in of shape (N, K, 3). + rois: Nx4 tensor of rois in xyxy format + heatmap_size: integer side length of square heatmap. + + Returns: + heatmaps: A tensor of shape (N, K) containing an integer spatial label + in the range [0, heatmap_size**2 - 1] for each keypoint in the input. + valid: A tensor of shape (N, K) containing whether each keypoint is in + the roi or not. + """ + + if rois.numel() == 0: + return rois.new().long(), rois.new().long() + offset_x = rois[:, 0] + offset_y = rois[:, 1] + scale_x = heatmap_size / (rois[:, 2] - rois[:, 0]) + scale_y = heatmap_size / (rois[:, 3] - rois[:, 1]) + + offset_x = offset_x[:, None] + offset_y = offset_y[:, None] + scale_x = scale_x[:, None] + scale_y = scale_y[:, None] + + x = keypoints[..., 0] + y = keypoints[..., 1] + + x_boundary_inds = x == rois[:, 2][:, None] + y_boundary_inds = y == rois[:, 3][:, None] + + x = (x - offset_x) * scale_x + x = x.floor().long() + y = (y - offset_y) * scale_y + y = y.floor().long() + + x[x_boundary_inds] = heatmap_size - 1 + y[y_boundary_inds] = heatmap_size - 1 + + valid_loc = (x >= 0) & (y >= 0) & (x < heatmap_size) & (y < heatmap_size) + vis = keypoints[..., 2] > 0 + valid = (valid_loc & vis).long() + + lin_ind = y * heatmap_size + x + heatmaps = lin_ind * valid + + return heatmaps, valid + + +@torch.no_grad() +def heatmaps_to_keypoints(maps: torch.Tensor, rois: torch.Tensor) -> torch.Tensor: + """ + Extract predicted keypoint locations from heatmaps. + + Args: + maps (Tensor): (#ROIs, #keypoints, POOL_H, POOL_W). The predicted heatmap of logits for + each ROI and each keypoint. + rois (Tensor): (#ROIs, 4). The box of each ROI. + + Returns: + Tensor of shape (#ROIs, #keypoints, 4) with the last dimension corresponding to + (x, y, logit, score) for each keypoint. + + When converting discrete pixel indices in an NxN image to a continuous keypoint coordinate, + we maintain consistency with :meth:`Keypoints.to_heatmap` by using the conversion from + Heckbert 1990: c = d + 0.5, where d is a discrete coordinate and c is a continuous coordinate. 
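# Editorial sketch (not part of this diff): the Heckbert 1990 convention shared by
# `_keypoints_to_heatmap` above and this function. A continuous coordinate c falls
# into the discrete pixel d = floor(c); mapping back, d is placed at its pixel
# center d + 0.5.
import math

c = 3.7
d = math.floor(c)     # 3   (the pixel index that contains c)
c_back = d + 0.5      # 3.5 (the pixel center; the fraction within the pixel is lost)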
+ """ + offset_x = rois[:, 0] + offset_y = rois[:, 1] + + widths = (rois[:, 2] - rois[:, 0]).clamp(min=1) + heights = (rois[:, 3] - rois[:, 1]).clamp(min=1) + widths_ceil = widths.ceil() + heights_ceil = heights.ceil() + + num_rois, num_keypoints = maps.shape[:2] + xy_preds = maps.new_zeros(rois.shape[0], num_keypoints, 4) + + width_corrections = widths / widths_ceil + height_corrections = heights / heights_ceil + + keypoints_idx = torch.arange(num_keypoints, device=maps.device) + + for i in range(num_rois): + outsize = (int(heights_ceil[i]), int(widths_ceil[i])) + roi_map = interpolate(maps[[i]], size=outsize, mode="bicubic", align_corners=False).squeeze( + 0 + ) # #keypoints x H x W + + # softmax over the spatial region + max_score, _ = roi_map.view(num_keypoints, -1).max(1) + max_score = max_score.view(num_keypoints, 1, 1) + tmp_full_resolution = (roi_map - max_score).exp_() + tmp_pool_resolution = (maps[i] - max_score).exp_() + # Produce scores over the region H x W, but normalize with POOL_H x POOL_W, + # so that the scores of objects of different absolute sizes will be more comparable + roi_map_scores = tmp_full_resolution / tmp_pool_resolution.sum((1, 2), keepdim=True) + + w = roi_map.shape[2] + pos = roi_map.view(num_keypoints, -1).argmax(1) + + x_int = pos % w + y_int = (pos - x_int) // w + + assert ( + roi_map_scores[keypoints_idx, y_int, x_int] + == roi_map_scores.view(num_keypoints, -1).max(1)[0] + ).all() + + x = (x_int.float() + 0.5) * width_corrections[i] + y = (y_int.float() + 0.5) * height_corrections[i] + + xy_preds[i, :, 0] = x + offset_x[i] + xy_preds[i, :, 1] = y + offset_y[i] + xy_preds[i, :, 2] = roi_map[keypoints_idx, y_int, x_int] + xy_preds[i, :, 3] = roi_map_scores[keypoints_idx, y_int, x_int] + + return xy_preds diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/masks.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/masks.py new file mode 100644 index 0000000000000000000000000000000000000000..e363baf3d8cfc4694558fc12bbd2e9d65507b9d9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/masks.py @@ -0,0 +1,424 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import itertools +import numpy as np +from typing import Any, Iterator, List, Union +import pycocotools.mask as mask_utils +import torch + +from detectron2.layers.roi_align import ROIAlign + +from .boxes import Boxes + + +def polygon_area(x, y): + # Using the shoelace formula + # https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates + return 0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))) + + +def polygons_to_bitmask(polygons: List[np.ndarray], height: int, width: int) -> np.ndarray: + """ + Args: + polygons (list[ndarray]): each array has shape (Nx2,) + height, width (int) + + Returns: + ndarray: a bool mask of shape (height, width) + """ + assert len(polygons) > 0, "COCOAPI does not support empty polygons" + rles = mask_utils.frPyObjects(polygons, height, width) + rle = mask_utils.merge(rles) + return mask_utils.decode(rle).astype(np.bool) + + +def rasterize_polygons_within_box( + polygons: List[np.ndarray], box: np.ndarray, mask_size: int +) -> torch.Tensor: + """ + Rasterize the polygons into a mask image and + crop the mask content in the given box. + The cropped mask is resized to (mask_size, mask_size). 
+ + This function is used when generating training targets for mask head in Mask R-CNN. + Given original ground-truth masks for an image, new ground-truth mask + training targets in the size of `mask_size x mask_size` + must be provided for each predicted box. This function will be called to + produce such targets. + + Args: + polygons (list[ndarray[float]]): a list of polygons, which represents an instance. + box: 4-element numpy array + mask_size (int): + + Returns: + Tensor: BoolTensor of shape (mask_size, mask_size) + """ + # 1. Shift the polygons w.r.t the boxes + w, h = box[2] - box[0], box[3] - box[1] + + polygons = copy.deepcopy(polygons) + for p in polygons: + p[0::2] = p[0::2] - box[0] + p[1::2] = p[1::2] - box[1] + + # 2. Rescale the polygons to the new box size + # max() to avoid division by small number + ratio_h = mask_size / max(h, 0.1) + ratio_w = mask_size / max(w, 0.1) + + if ratio_h == ratio_w: + for p in polygons: + p *= ratio_h + else: + for p in polygons: + p[0::2] *= ratio_w + p[1::2] *= ratio_h + + # 3. Rasterize the polygons with coco api + mask = polygons_to_bitmask(polygons, mask_size, mask_size) + mask = torch.from_numpy(mask) + return mask + + +class BitMasks: + """ + This class stores the segmentation masks for all objects in one image, in + the form of bitmaps. + + Attributes: + tensor: bool Tensor of N,H,W, representing N instances in the image. + """ + + def __init__(self, tensor: Union[torch.Tensor, np.ndarray]): + """ + Args: + tensor: bool Tensor of N,H,W, representing N instances in the image. + """ + device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device("cpu") + tensor = torch.as_tensor(tensor, dtype=torch.bool, device=device) + assert tensor.dim() == 3, tensor.size() + self.image_size = tensor.shape[1:] + self.tensor = tensor + + def to(self, device: str) -> "BitMasks": + return BitMasks(self.tensor.to(device)) + + @property + def device(self) -> torch.device: + return self.tensor.device + + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "BitMasks": + """ + Returns: + BitMasks: Create a new :class:`BitMasks` by indexing. + + The following usage are allowed: + + 1. `new_masks = masks[3]`: return a `BitMasks` which contains only one mask. + 2. `new_masks = masks[2:10]`: return a slice of masks. + 3. `new_masks = masks[vector]`, where vector is a torch.BoolTensor + with `length = len(masks)`. Nonzero elements in the vector will be selected. + + Note that the returned object might share storage with this object, + subject to Pytorch's indexing semantics. + """ + if isinstance(item, int): + return BitMasks(self.tensor[item].view(1, -1)) + m = self.tensor[item] + assert m.dim() == 3, "Indexing on BitMasks with {} returns a tensor with shape {}!".format( + item, m.shape + ) + return BitMasks(m) + + def __iter__(self) -> torch.Tensor: + yield from self.tensor + + def __repr__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={})".format(len(self.tensor)) + return s + + def __len__(self) -> int: + return self.tensor.shape[0] + + def nonempty(self) -> torch.Tensor: + """ + Find masks that are non-empty. + + Returns: + Tensor: a BoolTensor which represents + whether each mask is empty (False) or non-empty (True). 
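# Editorial sketch (not part of this diff): building a small BitMasks and filtering
# out empty instances with `nonempty`, using the package import seen in analysis.py below.
import torch
from detectron2.structures import BitMasks

masks = BitMasks(torch.zeros(2, 8, 8, dtype=torch.bool))
masks.tensor[0, 2:5, 2:5] = True   # first instance has foreground pixels, second is empty
keep = masks.nonempty()            # tensor([ True, False])
masks = masks[keep]                # BitMasks with a single non-empty instance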
+ """ + return self.tensor.flatten(1).any(dim=1) + + @staticmethod + def from_polygon_masks( + polygon_masks: Union["PolygonMasks", List[List[np.ndarray]]], height: int, width: int + ) -> "BitMasks": + """ + Args: + polygon_masks (list[list[ndarray]] or PolygonMasks) + height, width (int) + """ + if isinstance(polygon_masks, PolygonMasks): + polygon_masks = polygon_masks.polygons + masks = [polygons_to_bitmask(p, height, width) for p in polygon_masks] + return BitMasks(torch.stack([torch.from_numpy(x) for x in masks])) + + def crop_and_resize(self, boxes: torch.Tensor, mask_size: int) -> torch.Tensor: + """ + Crop each bitmask by the given box, and resize results to (mask_size, mask_size). + This can be used to prepare training targets for Mask R-CNN. + It has less reconstruction error compared to rasterization with polygons. + However we observe no difference in accuracy, + but BitMasks requires more memory to store all the masks. + + Args: + boxes (Tensor): Nx4 tensor storing the boxes for each mask + mask_size (int): the size of the rasterized mask. + + Returns: + Tensor: + A bool tensor of shape (N, mask_size, mask_size), where + N is the number of predicted boxes for this image. + """ + assert len(boxes) == len(self), "{} != {}".format(len(boxes), len(self)) + device = self.tensor.device + + batch_inds = torch.arange(len(boxes), device=device).to(dtype=boxes.dtype)[:, None] + rois = torch.cat([batch_inds, boxes], dim=1) # Nx5 + + bit_masks = self.tensor.to(dtype=torch.float32) + rois = rois.to(device=device) + output = ( + ROIAlign((mask_size, mask_size), 1.0, 0, aligned=True) + .forward(bit_masks[:, None, :, :], rois) + .squeeze(1) + ) + output = output >= 0.5 + return output + + def get_bounding_boxes(self) -> None: + # not needed now + raise NotImplementedError + + @staticmethod + def cat(bitmasks_list: List["BitMasks"]) -> "BitMasks": + """ + Concatenates a list of BitMasks into a single BitMasks + + Arguments: + bitmasks_list (list[BitMasks]) + + Returns: + BitMasks: the concatenated BitMasks + """ + assert isinstance(bitmasks_list, (list, tuple)) + assert len(bitmasks_list) > 0 + assert all(isinstance(bitmask, BitMasks) for bitmask in bitmasks_list) + + cat_bitmasks = type(bitmasks_list[0])(torch.cat([bm.tensor for bm in bitmasks_list], dim=0)) + return cat_bitmasks + + +class PolygonMasks: + """ + This class stores the segmentation masks for all objects in one image, in the form of polygons. + + Attributes: + polygons: list[list[ndarray]]. Each ndarray is a float64 vector representing a polygon. + """ + + def __init__(self, polygons: List[List[Union[torch.Tensor, np.ndarray]]]): + """ + Arguments: + polygons (list[list[np.ndarray]]): The first + level of the list correspond to individual instances, + the second level to all the polygons that compose the + instance, and the third level to the polygon coordinates. + The third level array should have the format of + [x0, y0, x1, y1, ..., xn, yn] (n >= 3). + """ + assert isinstance(polygons, list), ( + "Cannot create PolygonMasks: Expect a list of list of polygons per image. " + "Got '{}' instead.".format(type(polygons)) + ) + + def _make_array(t: Union[torch.Tensor, np.ndarray]) -> np.ndarray: + # Use float64 for higher precision, because why not? + # Always put polygons on CPU (self.to is a no-op) since they + # are supposed to be small tensors. 
+ # May need to change this assumption if GPU placement becomes useful + if isinstance(t, torch.Tensor): + t = t.cpu().numpy() + return np.asarray(t).astype("float64") + + def process_polygons( + polygons_per_instance: List[Union[torch.Tensor, np.ndarray]] + ) -> List[np.ndarray]: + assert isinstance(polygons_per_instance, list), ( + "Cannot create polygons: Expect a list of polygons per instance. " + "Got '{}' instead.".format(type(polygons_per_instance)) + ) + # transform the polygon to a tensor + polygons_per_instance = [_make_array(p) for p in polygons_per_instance] + for polygon in polygons_per_instance: + assert len(polygon) % 2 == 0 and len(polygon) >= 6 + return polygons_per_instance + + self.polygons: List[List[np.ndarray]] = [ + process_polygons(polygons_per_instance) for polygons_per_instance in polygons + ] + + def to(self, *args: Any, **kwargs: Any) -> "PolygonMasks": + return self + + @property + def device(self) -> torch.device: + return torch.device("cpu") + + def get_bounding_boxes(self) -> Boxes: + """ + Returns: + Boxes: tight bounding boxes around polygon masks. + """ + boxes = torch.zeros(len(self.polygons), 4, dtype=torch.float32) + for idx, polygons_per_instance in enumerate(self.polygons): + minxy = torch.as_tensor([float("inf"), float("inf")], dtype=torch.float32) + maxxy = torch.zeros(2, dtype=torch.float32) + for polygon in polygons_per_instance: + coords = torch.from_numpy(polygon).view(-1, 2).to(dtype=torch.float32) + minxy = torch.min(minxy, torch.min(coords, dim=0).values) + maxxy = torch.max(maxxy, torch.max(coords, dim=0).values) + boxes[idx, :2] = minxy + boxes[idx, 2:] = maxxy + return Boxes(boxes) + + def nonempty(self) -> torch.Tensor: + """ + Find masks that are non-empty. + + Returns: + Tensor: + a BoolTensor which represents whether each mask is empty (False) or not (True). + """ + keep = [1 if len(polygon) > 0 else 0 for polygon in self.polygons] + return torch.from_numpy(np.asarray(keep, dtype=np.bool)) + + def __getitem__(self, item: Union[int, slice, List[int], torch.BoolTensor]) -> "PolygonMasks": + """ + Support indexing over the instances and return a `PolygonMasks` object. + `item` can be: + + 1. An integer. It will return an object with only one instance. + 2. A slice. It will return an object with the selected instances. + 3. A list[int]. It will return an object with the selected instances, + correpsonding to the indices in the list. + 4. A vector mask of type BoolTensor, whose length is num_instances. + It will return an object with the instances whose mask is nonzero. + """ + if isinstance(item, int): + selected_polygons = [self.polygons[item]] + elif isinstance(item, slice): + selected_polygons = self.polygons[item] + elif isinstance(item, list): + selected_polygons = [self.polygons[i] for i in item] + elif isinstance(item, torch.Tensor): + # Polygons is a list, so we have to move the indices back to CPU. + if item.dtype == torch.bool: + assert item.dim() == 1, item.shape + item = item.nonzero().squeeze(1).cpu().numpy().tolist() + elif item.dtype in [torch.int32, torch.int64]: + item = item.cpu().numpy().tolist() + else: + raise ValueError("Unsupported tensor dtype={} for indexing!".format(item.dtype)) + selected_polygons = [self.polygons[i] for i in item] + return PolygonMasks(selected_polygons) + + def __iter__(self) -> Iterator[List[np.ndarray]]: + """ + Yields: + list[ndarray]: the polygons for one instance. + Each Tensor is a float64 vector representing a polygon. 
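# Editorial sketch (not part of this diff): a PolygonMasks with one instance made of a
# single triangle. The module import follows this file's path; `area()` is defined a
# little further below in the same file.
import numpy as np
from detectron2.structures.masks import PolygonMasks

triangle = np.array([0.0, 0.0, 4.0, 0.0, 0.0, 3.0])   # [x0, y0, x1, y1, x2, y2]
masks = PolygonMasks([[triangle]])                     # outer list: instances, inner: polygons
boxes = masks.get_bounding_boxes()                     # Boxes([[0., 0., 4., 3.]])
areas = masks.area()                                   # tensor([6.]) by the shoelace formula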
+ """ + return iter(self.polygons) + + def __repr__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={})".format(len(self.polygons)) + return s + + def __len__(self) -> int: + return len(self.polygons) + + def crop_and_resize(self, boxes: torch.Tensor, mask_size: int) -> torch.Tensor: + """ + Crop each mask by the given box, and resize results to (mask_size, mask_size). + This can be used to prepare training targets for Mask R-CNN. + + Args: + boxes (Tensor): Nx4 tensor storing the boxes for each mask + mask_size (int): the size of the rasterized mask. + + Returns: + Tensor: A bool tensor of shape (N, mask_size, mask_size), where + N is the number of predicted boxes for this image. + """ + assert len(boxes) == len(self), "{} != {}".format(len(boxes), len(self)) + + device = boxes.device + # Put boxes on the CPU, as the polygon representation is not efficient GPU-wise + # (several small tensors for representing a single instance mask) + boxes = boxes.to(torch.device("cpu")) + + results = [ + rasterize_polygons_within_box(poly, box.numpy(), mask_size) + for poly, box in zip(self.polygons, boxes) + ] + """ + poly: list[list[float]], the polygons for one instance + box: a tensor of shape (4,) + """ + if len(results) == 0: + return torch.empty(0, mask_size, mask_size, dtype=torch.bool, device=device) + return torch.stack(results, dim=0).to(device=device) + + def area(self): + """ + Computes area of the mask. + Only works with Polygons, using the shoelace formula: + https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates + + Returns: + Tensor: a vector, area for each instance + """ + + area = [] + for polygons_per_instance in self.polygons: + area_per_instance = 0 + for p in polygons_per_instance: + area_per_instance += polygon_area(p[0::2], p[1::2]) + area.append(area_per_instance) + + return torch.tensor(area) + + @staticmethod + def cat(polymasks_list: List["PolygonMasks"]) -> "PolygonMasks": + """ + Concatenates a list of PolygonMasks into a single PolygonMasks + + Arguments: + polymasks_list (list[PolygonMasks]) + + Returns: + PolygonMasks: the concatenated PolygonMasks + """ + assert isinstance(polymasks_list, (list, tuple)) + assert len(polymasks_list) > 0 + assert all(isinstance(polymask, PolygonMasks) for polymask in polymasks_list) + + cat_polymasks = type(polymasks_list[0])( + list(itertools.chain.from_iterable(pm.polygons for pm in polymasks_list)) + ) + return cat_polymasks diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/rotated_boxes.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/rotated_boxes.py new file mode 100644 index 0000000000000000000000000000000000000000..823cfb62a13d0ff060099d1b930bc900a4ca009b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/structures/rotated_boxes.py @@ -0,0 +1,481 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import math +from typing import Iterator, Union +import torch + +from detectron2.layers.rotated_boxes import pairwise_iou_rotated + +from .boxes import Boxes + + +class RotatedBoxes(Boxes): + """ + This structure stores a list of rotated boxes as a Nx5 torch.Tensor. 
+ It supports some common methods about boxes + (`area`, `clip`, `nonempty`, etc), + and also behaves like a Tensor + (support indexing, `to(device)`, `.device`, and iteration over all boxes) + """ + + def __init__(self, tensor: torch.Tensor): + """ + Args: + tensor (Tensor[float]): a Nx5 matrix. Each row is + (x_center, y_center, width, height, angle), + in which angle is represented in degrees. + While there's no strict range restriction for it, + the recommended principal range is between [-180, 180) degrees. + + Assume we have a horizontal box B = (x_center, y_center, width, height), + where width is along the x-axis and height is along the y-axis. + The rotated box B_rot (x_center, y_center, width, height, angle) + can be seen as: + + 1. When angle == 0: + B_rot == B + 2. When angle > 0: + B_rot is obtained by rotating B w.r.t its center by :math:`|angle|` degrees CCW; + 3. When angle < 0: + B_rot is obtained by rotating B w.r.t its center by :math:`|angle|` degrees CW. + + Mathematically, since the right-handed coordinate system for image space + is (y, x), where y is top->down and x is left->right, the 4 vertices of the + rotated rectangle :math:`(yr_i, xr_i)` (i = 1, 2, 3, 4) can be obtained from + the vertices of the horizontal rectangle (y_i, x_i) (i = 1, 2, 3, 4) + in the following way (:math:`\\theta = angle*\\pi/180` is the angle in radians, + (y_c, x_c) is the center of the rectangle): + + .. math:: + + yr_i = \\cos(\\theta) (y_i - y_c) - \\sin(\\theta) (x_i - x_c) + y_c, + + xr_i = \\sin(\\theta) (y_i - y_c) + \\cos(\\theta) (x_i - x_c) + x_c, + + which is the standard rigid-body rotation transformation. + + Intuitively, the angle is + (1) the rotation angle from y-axis in image space + to the height vector (top->down in the box's local coordinate system) + of the box in CCW, and + (2) the rotation angle from x-axis in image space + to the width vector (left->right in the box's local coordinate system) + of the box in CCW. + + More intuitively, consider the following horizontal box ABCD represented + in (x1, y1, x2, y2): (3, 2, 7, 4), + covering the [3, 7] x [2, 4] region of the continuous coordinate system + which looks like this: + + .. code:: none + + O--------> x + | + | A---B + | | | + | D---C + | + v y + + Note that each capital letter represents one 0-dimensional geometric point + instead of a 'square pixel' here. + + In the example above, using (x, y) to represent a point we have: + + .. math:: + + O = (0, 0), A = (3, 2), B = (7, 2), C = (7, 4), D = (3, 4) + + We name vector AB = vector DC as the width vector in box's local coordinate system, and + vector AD = vector BC as the height vector in box's local coordinate system. Initially, + when angle = 0 degree, they're aligned with the positive directions of x-axis and y-axis + in the image space, respectively. + + For better illustration, we denote the center of the box as E, + + .. code:: none + + O--------> x + | + | A---B + | | E | + | D---C + | + v y + + where the center E = ((3+7)/2, (2+4)/2) = (5, 3). + + Also, + + .. math:: + + width = |AB| = |CD| = 7 - 3 = 4, + height = |AD| = |BC| = 4 - 2 = 2. + + Therefore, the corresponding representation for the same shape in rotated box in + (x_center, y_center, width, height, angle) format is: + + (5, 3, 4, 2, 0), + + Now, let's consider (5, 3, 4, 2, 90), which is rotated by 90 degrees + CCW (counter-clockwise) by definition. It looks like this: + + .. 
code:: none + + O--------> x + | B-C + | | | + | |E| + | | | + | A-D + v y + + The center E is still located at the same point (5, 3), while the vertices + ABCD are rotated by 90 degrees CCW with regard to E: + A = (4, 5), B = (4, 1), C = (6, 1), D = (6, 5) + + Here, 90 degrees can be seen as the CCW angle to rotate from y-axis to + vector AD or vector BC (the top->down height vector in box's local coordinate system), + or the CCW angle to rotate from x-axis to vector AB or vector DC (the left->right + width vector in box's local coordinate system). + + .. math:: + + width = |AB| = |CD| = 5 - 1 = 4, + height = |AD| = |BC| = 6 - 4 = 2. + + Next, how about (5, 3, 4, 2, -90), which is rotated by 90 degrees CW (clockwise) + by definition? It looks like this: + + .. code:: none + + O--------> x + | D-A + | | | + | |E| + | | | + | C-B + v y + + The center E is still located at the same point (5, 3), while the vertices + ABCD are rotated by 90 degrees CW with regard to E: + A = (6, 1), B = (6, 5), C = (4, 5), D = (4, 1) + + .. math:: + + width = |AB| = |CD| = 5 - 1 = 4, + height = |AD| = |BC| = 6 - 4 = 2. + + This covers exactly the same region as (5, 3, 4, 2, 90) does, and their IoU + will be 1. However, these two will generate different RoI Pooling results and + should not be treated as an identical box. + + On the other hand, it's easy to see that (X, Y, W, H, A) is identical to + (X, Y, W, H, A+360N), for any integer N. For example (5, 3, 4, 2, 270) would be + identical to (5, 3, 4, 2, -90), because rotating the shape 270 degrees CCW is + equivalent to rotating the same shape 90 degrees CW. + + We could rotate further to get (5, 3, 4, 2, 180), or (5, 3, 4, 2, -180): + + .. code:: none + + O--------> x + | + | C---D + | | E | + | B---A + | + v y + + .. math:: + + A = (7, 4), B = (3, 4), C = (3, 2), D = (7, 2), + + width = |AB| = |CD| = 7 - 3 = 4, + height = |AD| = |BC| = 4 - 2 = 2. + + Finally, this is a very inaccurate (heavily quantized) illustration of + how (5, 3, 4, 2, 60) looks like in case anyone wonders: + + .. code:: none + + O--------> x + | B\ + | / C + | /E / + | A / + | `D + v y + + It's still a rectangle with center of (5, 3), width of 4 and height of 2, + but its angle (and thus orientation) is somewhere between + (5, 3, 4, 2, 0) and (5, 3, 4, 2, 90). + """ + device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device("cpu") + tensor = torch.as_tensor(tensor, dtype=torch.float32, device=device) + if tensor.numel() == 0: + # Use reshape, so we don't end up creating a new tensor that does not depend on + # the inputs (and consequently confuses jit) + tensor = tensor.reshape((0, 5)).to(dtype=torch.float32, device=device) + assert tensor.dim() == 2 and tensor.size(-1) == 5, tensor.size() + + self.tensor = tensor + + def clone(self) -> "RotatedBoxes": + """ + Clone the RotatedBoxes. + + Returns: + RotatedBoxes + """ + return RotatedBoxes(self.tensor.clone()) + + def to(self, device: str) -> "RotatedBoxes": + return RotatedBoxes(self.tensor.to(device)) + + def area(self) -> torch.Tensor: + """ + Computes the area of all the boxes. + + Returns: + torch.Tensor: a vector with areas of each box. 
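# Editorial sketch (not part of this diff): the two boxes worked through in the class
# docstring above, (5, 3, 4, 2, 0) and (5, 3, 4, 2, 90), cover different regions but
# share the same center and area (width * height = 8). The import path follows this
# file's location in the package.
import torch
from detectron2.structures.rotated_boxes import RotatedBoxes

boxes = RotatedBoxes(torch.tensor([[5.0, 3.0, 4.0, 2.0,  0.0],
                                   [5.0, 3.0, 4.0, 2.0, 90.0]]))
print(boxes.area())          # tensor([8., 8.])
print(boxes.get_centers())   # tensor([[5., 3.], [5., 3.]])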
+ """ + box = self.tensor + area = box[:, 2] * box[:, 3] + return area + + def normalize_angles(self) -> None: + """ + Restrict angles to the range of [-180, 180) degrees + """ + self.tensor[:, 4] = (self.tensor[:, 4] + 180.0) % 360.0 - 180.0 + + def clip(self, box_size: Boxes.BoxSizeType, clip_angle_threshold: float = 1.0) -> None: + """ + Clip (in place) the boxes by limiting x coordinates to the range [0, width] + and y coordinates to the range [0, height]. + + For RRPN: + Only clip boxes that are almost horizontal with a tolerance of + clip_angle_threshold to maintain backward compatibility. + + Rotated boxes beyond this threshold are not clipped for two reasons: + + 1. There are potentially multiple ways to clip a rotated box to make it + fit within the image. + 2. It's tricky to make the entire rectangular box fit within the image + and still be able to not leave out pixels of interest. + + Therefore we rely on ops like RoIAlignRotated to safely handle this. + + Args: + box_size (height, width): The clipping box's size. + clip_angle_threshold: + Iff. abs(normalized(angle)) <= clip_angle_threshold (in degrees), + we do the clipping as horizontal boxes. + """ + h, w = box_size + + # normalize angles to be within (-180, 180] degrees + self.normalize_angles() + + idx = torch.where(torch.abs(self.tensor[:, 4]) <= clip_angle_threshold)[0] + + # convert to (x1, y1, x2, y2) + x1 = self.tensor[idx, 0] - self.tensor[idx, 2] / 2.0 + y1 = self.tensor[idx, 1] - self.tensor[idx, 3] / 2.0 + x2 = self.tensor[idx, 0] + self.tensor[idx, 2] / 2.0 + y2 = self.tensor[idx, 1] + self.tensor[idx, 3] / 2.0 + + # clip + x1.clamp_(min=0, max=w) + y1.clamp_(min=0, max=h) + x2.clamp_(min=0, max=w) + y2.clamp_(min=0, max=h) + + # convert back to (xc, yc, w, h) + self.tensor[idx, 0] = (x1 + x2) / 2.0 + self.tensor[idx, 1] = (y1 + y2) / 2.0 + # make sure widths and heights do not increase due to numerical errors + self.tensor[idx, 2] = torch.min(self.tensor[idx, 2], x2 - x1) + self.tensor[idx, 3] = torch.min(self.tensor[idx, 3], y2 - y1) + + def nonempty(self, threshold: float = 0.0) -> torch.Tensor: + """ + Find boxes that are non-empty. + A box is considered empty, if either of its side is no larger than threshold. + + Returns: + Tensor: a binary vector which represents + whether each box is empty (False) or non-empty (True). + """ + box = self.tensor + widths = box[:, 2] + heights = box[:, 3] + keep = (widths > threshold) & (heights > threshold) + return keep + + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "RotatedBoxes": + """ + Returns: + RotatedBoxes: Create a new :class:`RotatedBoxes` by indexing. + + The following usage are allowed: + + 1. `new_boxes = boxes[3]`: return a `RotatedBoxes` which contains only one box. + 2. `new_boxes = boxes[2:10]`: return a slice of boxes. + 3. `new_boxes = boxes[vector]`, where vector is a torch.ByteTensor + with `length = len(boxes)`. Nonzero elements in the vector will be selected. + + Note that the returned RotatedBoxes might share storage with this RotatedBoxes, + subject to Pytorch's indexing semantics. 
+ """ + if isinstance(item, int): + return RotatedBoxes(self.tensor[item].view(1, -1)) + b = self.tensor[item] + assert b.dim() == 2, "Indexing on RotatedBoxes with {} failed to return a matrix!".format( + item + ) + return RotatedBoxes(b) + + def __len__(self) -> int: + return self.tensor.shape[0] + + def __repr__(self) -> str: + return "RotatedBoxes(" + str(self.tensor) + ")" + + def inside_box(self, box_size: Boxes.BoxSizeType, boundary_threshold: int = 0) -> torch.Tensor: + """ + Args: + box_size (height, width): Size of the reference box covering + [0, width] x [0, height] + boundary_threshold (int): Boxes that extend beyond the reference box + boundary by more than boundary_threshold are considered "outside". + + For RRPN, it might not be necessary to call this function since it's common + for rotated box to extend to outside of the image boundaries + (the clip function only clips the near-horizontal boxes) + + Returns: + a binary vector, indicating whether each box is inside the reference box. + """ + height, width = box_size + + cnt_x = self.tensor[..., 0] + cnt_y = self.tensor[..., 1] + half_w = self.tensor[..., 2] / 2.0 + half_h = self.tensor[..., 3] / 2.0 + a = self.tensor[..., 4] + c = torch.abs(torch.cos(a * math.pi / 180.0)) + s = torch.abs(torch.sin(a * math.pi / 180.0)) + # This basically computes the horizontal bounding rectangle of the rotated box + max_rect_dx = c * half_w + s * half_h + max_rect_dy = c * half_h + s * half_w + + inds_inside = ( + (cnt_x - max_rect_dx >= -boundary_threshold) + & (cnt_y - max_rect_dy >= -boundary_threshold) + & (cnt_x + max_rect_dx < width + boundary_threshold) + & (cnt_y + max_rect_dy < height + boundary_threshold) + ) + + return inds_inside + + def get_centers(self) -> torch.Tensor: + """ + Returns: + The box centers in a Nx2 array of (x, y). + """ + return self.tensor[:, :2] + + def scale(self, scale_x: float, scale_y: float) -> None: + """ + Scale the rotated box with horizontal and vertical scaling factors + Note: when scale_factor_x != scale_factor_y, + the rotated box does not preserve the rectangular shape when the angle + is not a multiple of 90 degrees under resize transformation. + Instead, the shape is a parallelogram (that has skew) + Here we make an approximation by fitting a rotated rectangle to the parallelogram. 
+ """ + self.tensor[:, 0] *= scale_x + self.tensor[:, 1] *= scale_y + theta = self.tensor[:, 4] * math.pi / 180.0 + c = torch.cos(theta) + s = torch.sin(theta) + + # In image space, y is top->down and x is left->right + # Consider the local coordintate system for the rotated box, + # where the box center is located at (0, 0), and the four vertices ABCD are + # A(-w / 2, -h / 2), B(w / 2, -h / 2), C(w / 2, h / 2), D(-w / 2, h / 2) + # the midpoint of the left edge AD of the rotated box E is: + # E = (A+D)/2 = (-w / 2, 0) + # the midpoint of the top edge AB of the rotated box F is: + # F(0, -h / 2) + # To get the old coordinates in the global system, apply the rotation transformation + # (Note: the right-handed coordinate system for image space is yOx): + # (old_x, old_y) = (s * y + c * x, c * y - s * x) + # E(old) = (s * 0 + c * (-w/2), c * 0 - s * (-w/2)) = (-c * w / 2, s * w / 2) + # F(old) = (s * (-h / 2) + c * 0, c * (-h / 2) - s * 0) = (-s * h / 2, -c * h / 2) + # After applying the scaling factor (sfx, sfy): + # E(new) = (-sfx * c * w / 2, sfy * s * w / 2) + # F(new) = (-sfx * s * h / 2, -sfy * c * h / 2) + # The new width after scaling tranformation becomes: + + # w(new) = |E(new) - O| * 2 + # = sqrt[(sfx * c * w / 2)^2 + (sfy * s * w / 2)^2] * 2 + # = sqrt[(sfx * c)^2 + (sfy * s)^2] * w + # i.e., scale_factor_w = sqrt[(sfx * c)^2 + (sfy * s)^2] + # + # For example, + # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_w == scale_factor_x; + # when |angle| = 90, c = 0, |s| = 1, scale_factor_w == scale_factor_y + self.tensor[:, 2] *= torch.sqrt((scale_x * c) ** 2 + (scale_y * s) ** 2) + + # h(new) = |F(new) - O| * 2 + # = sqrt[(sfx * s * h / 2)^2 + (sfy * c * h / 2)^2] * 2 + # = sqrt[(sfx * s)^2 + (sfy * c)^2] * h + # i.e., scale_factor_h = sqrt[(sfx * s)^2 + (sfy * c)^2] + # + # For example, + # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_h == scale_factor_y; + # when |angle| = 90, c = 0, |s| = 1, scale_factor_h == scale_factor_x + self.tensor[:, 3] *= torch.sqrt((scale_x * s) ** 2 + (scale_y * c) ** 2) + + # The angle is the rotation angle from y-axis in image space to the height + # vector (top->down in the box's local coordinate system) of the box in CCW. + # + # angle(new) = angle_yOx(O - F(new)) + # = angle_yOx( (sfx * s * h / 2, sfy * c * h / 2) ) + # = atan2(sfx * s * h / 2, sfy * c * h / 2) + # = atan2(sfx * s, sfy * c) + # + # For example, + # when sfx == sfy, angle(new) == atan2(s, c) == angle(old) + self.tensor[:, 4] = torch.atan2(scale_x * s, scale_y * c) * 180 / math.pi + + @property + def device(self) -> str: + return self.tensor.device + + def __iter__(self) -> Iterator[torch.Tensor]: + """ + Yield a box as a Tensor of shape (5,) at a time. + """ + yield from self.tensor + + +def pairwise_iou(boxes1: RotatedBoxes, boxes2: RotatedBoxes) -> None: + """ + Given two lists of rotated boxes of size N and M, + compute the IoU (intersection over union) + between __all__ N x M pairs of boxes. + The box order must be (x_center, y_center, width, height, angle). + + Args: + boxes1, boxes2 (RotatedBoxes): + two `RotatedBoxes`. Contains N & M rotated boxes, respectively. + + Returns: + Tensor: IoU, sized [N,M]. 
+ """ + + return pairwise_iou_rotated(boxes1.tensor, boxes2.tensor) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9765b24a730b77556104187ac3ef5439ab0859fd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/README.md @@ -0,0 +1,5 @@ +# Utility functions + +This folder contain utility functions that are not used in the +core library, but are useful for building models or training +code using the config system. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..168f9979a4623806934b0ff1102ac166704e7dec --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/analysis.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/analysis.py new file mode 100644 index 0000000000000000000000000000000000000000..c48e376c242f57f480280538ae770520d14110f8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/analysis.py @@ -0,0 +1,164 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# -*- coding: utf-8 -*- + +import logging +import typing +import torch +from fvcore.nn import activation_count, flop_count, parameter_count, parameter_count_table +from torch import nn + +from detectron2.structures import BitMasks, Boxes, ImageList, Instances + +from .logger import log_first_n + +__all__ = [ + "activation_count_operators", + "flop_count_operators", + "parameter_count_table", + "parameter_count", +] + +FLOPS_MODE = "flops" +ACTIVATIONS_MODE = "activations" + + +# some extra ops to ignore from counting. +_IGNORED_OPS = [ + "aten::add", + "aten::add_", + "aten::batch_norm", + "aten::constant_pad_nd", + "aten::div", + "aten::div_", + "aten::exp", + "aten::log2", + "aten::max_pool2d", + "aten::meshgrid", + "aten::mul", + "aten::mul_", + "aten::nonzero_numpy", + "aten::relu", + "aten::relu_", + "aten::rsub", + "aten::sigmoid", + "aten::sigmoid_", + "aten::softmax", + "aten::sort", + "aten::sqrt", + "aten::sub", + "aten::upsample_nearest2d", + "prim::PythonOp", + "torchvision::nms", +] + + +def flop_count_operators( + model: nn.Module, inputs: list, **kwargs +) -> typing.DefaultDict[str, float]: + """ + Implement operator-level flops counting using jit. + This is a wrapper of fvcore.nn.flop_count, that supports standard detection models + in detectron2. + + Note: + The function runs the input through the model to compute flops. + The flops of a detection model is often input-dependent, for example, + the flops of box & mask head depends on the number of proposals & + the number of detected objects. + Therefore, the flops counting using a single input may not accurately + reflect the computation cost of a model. + + Args: + model: a detectron2 model that takes `list[dict]` as input. + inputs (list[dict]): inputs to model, in detectron2's standard format. 
+ """ + return _wrapper_count_operators(model=model, inputs=inputs, mode=FLOPS_MODE, **kwargs) + + +def activation_count_operators( + model: nn.Module, inputs: list, **kwargs +) -> typing.DefaultDict[str, float]: + """ + Implement operator-level activations counting using jit. + This is a wrapper of fvcore.nn.activation_count, that supports standard detection models + in detectron2. + + Note: + The function runs the input through the model to compute activations. + The activations of a detection model is often input-dependent, for example, + the activations of box & mask head depends on the number of proposals & + the number of detected objects. + + Args: + model: a detectron2 model that takes `list[dict]` as input. + inputs (list[dict]): inputs to model, in detectron2's standard format. + """ + return _wrapper_count_operators(model=model, inputs=inputs, mode=ACTIVATIONS_MODE, **kwargs) + + +def _flatten_to_tuple(outputs): + result = [] + if isinstance(outputs, torch.Tensor): + result.append(outputs) + elif isinstance(outputs, (list, tuple)): + for v in outputs: + result.extend(_flatten_to_tuple(v)) + elif isinstance(outputs, dict): + for _, v in outputs.items(): + result.extend(_flatten_to_tuple(v)) + elif isinstance(outputs, Instances): + result.extend(_flatten_to_tuple(outputs.get_fields())) + elif isinstance(outputs, (Boxes, BitMasks, ImageList)): + result.append(outputs.tensor) + else: + log_first_n( + logging.WARN, + f"Output of type {type(outputs)} not included in flops/activations count.", + n=10, + ) + return tuple(result) + + +def _wrapper_count_operators( + model: nn.Module, inputs: list, mode: str, **kwargs +) -> typing.DefaultDict[str, float]: + + # ignore some ops + supported_ops = {k: lambda *args, **kwargs: {} for k in _IGNORED_OPS} + supported_ops.update(kwargs.pop("supported_ops", {})) + kwargs["supported_ops"] = supported_ops + + assert len(inputs) == 1, "Please use batch size=1" + tensor_input = inputs[0]["image"] + + class WrapModel(nn.Module): + def __init__(self, model): + super().__init__() + if isinstance( + model, (nn.parallel.distributed.DistributedDataParallel, nn.DataParallel) + ): + self.model = model.module + else: + self.model = model + + def forward(self, image): + # jit requires the input/output to be Tensors + inputs = [{"image": image}] + outputs = self.model.forward(inputs) + # Only the subgraph that computes the returned tuple of tensor will be + # counted. So we flatten everything we found to tuple of tensors. + return _flatten_to_tuple(outputs) + + old_train = model.training + with torch.no_grad(): + if mode == FLOPS_MODE: + ret = flop_count(WrapModel(model).train(False), (tensor_input,), **kwargs) + elif mode == ACTIVATIONS_MODE: + ret = activation_count(WrapModel(model).train(False), (tensor_input,), **kwargs) + else: + raise NotImplementedError("Count for mode {} is not supported yet.".format(mode)) + # compatible with change in fvcore + if isinstance(ret, tuple): + ret = ret[0] + model.train(old_train) + return ret diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/collect_env.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/collect_env.py new file mode 100644 index 0000000000000000000000000000000000000000..c25b99cb0ab626cc4f4dabca5eb81f710011f2e3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/collect_env.py @@ -0,0 +1,160 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import importlib +import numpy as np +import os +import re +import subprocess +import sys +from collections import defaultdict +import PIL +import torch +import torchvision +from tabulate import tabulate + +__all__ = ["collect_env_info"] + + +def collect_torch_env(): + try: + import torch.__config__ + + return torch.__config__.show() + except ImportError: + # compatible with older versions of pytorch + from torch.utils.collect_env import get_pretty_env_info + + return get_pretty_env_info() + + +def get_env_module(): + var_name = "DETECTRON2_ENV_MODULE" + return var_name, os.environ.get(var_name, "") + + +def detect_compute_compatibility(CUDA_HOME, so_file): + try: + cuobjdump = os.path.join(CUDA_HOME, "bin", "cuobjdump") + if os.path.isfile(cuobjdump): + output = subprocess.check_output( + "'{}' --list-elf '{}'".format(cuobjdump, so_file), shell=True + ) + output = output.decode("utf-8").strip().split("\n") + sm = [] + for line in output: + line = re.findall(r"\.sm_[0-9]*\.", line)[0] + sm.append(line.strip(".")) + sm = sorted(set(sm)) + return ", ".join(sm) + else: + return so_file + "; cannot find cuobjdump" + except Exception: + # unhandled failure + return so_file + + +def collect_env_info(): + has_cuda = torch.cuda.is_available() + # NOTE: the use of CUDA_HOME requires the CUDA build deps, though in + # theory detectron2 should be made runnable with only the CUDA runtime + from torch.utils.cpp_extension import CUDA_HOME + + data = [] + data.append(("sys.platform", sys.platform)) + data.append(("Python", sys.version.replace("\n", ""))) + data.append(("numpy", np.__version__)) + + try: + import detectron2 # noqa + + data.append( + ("detectron2", detectron2.__version__ + " @" + os.path.dirname(detectron2.__file__)) + ) + except ImportError: + data.append(("detectron2", "failed to import")) + else: + try: + from detectron2 import _C + except ImportError: + data.append(("detectron2._C", "failed to import")) + else: + data.append(("detectron2 compiler", _C.get_compiler_version())) + data.append(("detectron2 CUDA compiler", _C.get_cuda_version())) + if has_cuda: + data.append( + ("detectron2 arch flags", detect_compute_compatibility(CUDA_HOME, _C.__file__)) + ) + + data.append(get_env_module()) + data.append(("PyTorch", torch.__version__ + " @" + os.path.dirname(torch.__file__))) + data.append(("PyTorch debug build", torch.version.debug)) + + data.append(("CUDA available", has_cuda)) + if has_cuda: + devices = defaultdict(list) + for k in range(torch.cuda.device_count()): + devices[torch.cuda.get_device_name(k)].append(str(k)) + for name, devids in devices.items(): + data.append(("GPU " + ",".join(devids), name)) + + from torch.utils.cpp_extension import CUDA_HOME + + data.append(("CUDA_HOME", str(CUDA_HOME))) + + if CUDA_HOME is not None and os.path.isdir(CUDA_HOME): + try: + nvcc = os.path.join(CUDA_HOME, "bin", "nvcc") + nvcc = subprocess.check_output("'{}' -V | tail -n1".format(nvcc), shell=True) + nvcc = nvcc.decode("utf-8").strip() + except subprocess.SubprocessError: + nvcc = "Not Available" + data.append(("NVCC", nvcc)) + + cuda_arch_list = os.environ.get("TORCH_CUDA_ARCH_LIST", None) + if cuda_arch_list: + data.append(("TORCH_CUDA_ARCH_LIST", cuda_arch_list)) + data.append(("Pillow", PIL.__version__)) + + try: + data.append( + ( + "torchvision", + str(torchvision.__version__) + " @" + os.path.dirname(torchvision.__file__), + ) + ) + if has_cuda: + try: + torchvision_C = importlib.util.find_spec("torchvision._C").origin + msg = 
detect_compute_compatibility(CUDA_HOME, torchvision_C) + data.append(("torchvision arch flags", msg)) + except ImportError: + data.append(("torchvision._C", "failed to find")) + except AttributeError: + data.append(("torchvision", "unknown")) + + try: + import fvcore + + data.append(("fvcore", fvcore.__version__)) + except ImportError: + pass + + try: + import cv2 + + data.append(("cv2", cv2.__version__)) + except ImportError: + pass + env_str = tabulate(data) + "\n" + env_str += collect_torch_env() + return env_str + + +if __name__ == "__main__": + try: + import detectron2 # noqa + except ImportError: + print(collect_env_info()) + else: + from detectron2.utils.collect_env import collect_env_info + + print(collect_env_info()) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/colormap.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/colormap.py new file mode 100644 index 0000000000000000000000000000000000000000..1bf1455e4ce9e077961143c8d734a7298d28476d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/colormap.py @@ -0,0 +1,140 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +""" +An awesome colormap for really neat visualizations. +Copied from Detectron, and removed gray colors. +""" + +import numpy as np + +__all__ = ["colormap", "random_color"] + +# fmt: off +# RGB: +_COLORS = np.array( + [ + 0.000, 0.447, 0.741, + 0.850, 0.325, 0.098, + 0.929, 0.694, 0.125, + 0.494, 0.184, 0.556, + 0.466, 0.674, 0.188, + 0.301, 0.745, 0.933, + 0.635, 0.078, 0.184, + 0.300, 0.300, 0.300, + 0.600, 0.600, 0.600, + 1.000, 0.000, 0.000, + 1.000, 0.500, 0.000, + 0.749, 0.749, 0.000, + 0.000, 1.000, 0.000, + 0.000, 0.000, 1.000, + 0.667, 0.000, 1.000, + 0.333, 0.333, 0.000, + 0.333, 0.667, 0.000, + 0.333, 1.000, 0.000, + 0.667, 0.333, 0.000, + 0.667, 0.667, 0.000, + 0.667, 1.000, 0.000, + 1.000, 0.333, 0.000, + 1.000, 0.667, 0.000, + 1.000, 1.000, 0.000, + 0.000, 0.333, 0.500, + 0.000, 0.667, 0.500, + 0.000, 1.000, 0.500, + 0.333, 0.000, 0.500, + 0.333, 0.333, 0.500, + 0.333, 0.667, 0.500, + 0.333, 1.000, 0.500, + 0.667, 0.000, 0.500, + 0.667, 0.333, 0.500, + 0.667, 0.667, 0.500, + 0.667, 1.000, 0.500, + 1.000, 0.000, 0.500, + 1.000, 0.333, 0.500, + 1.000, 0.667, 0.500, + 1.000, 1.000, 0.500, + 0.000, 0.333, 1.000, + 0.000, 0.667, 1.000, + 0.000, 1.000, 1.000, + 0.333, 0.000, 1.000, + 0.333, 0.333, 1.000, + 0.333, 0.667, 1.000, + 0.333, 1.000, 1.000, + 0.667, 0.000, 1.000, + 0.667, 0.333, 1.000, + 0.667, 0.667, 1.000, + 0.667, 1.000, 1.000, + 1.000, 0.000, 1.000, + 1.000, 0.333, 1.000, + 1.000, 0.667, 1.000, + 0.333, 0.000, 0.000, + 0.500, 0.000, 0.000, + 0.667, 0.000, 0.000, + 0.833, 0.000, 0.000, + 1.000, 0.000, 0.000, + 0.000, 0.167, 0.000, + 0.000, 0.333, 0.000, + 0.000, 0.500, 0.000, + 0.000, 0.667, 0.000, + 0.000, 0.833, 0.000, + 0.000, 1.000, 0.000, + 0.000, 0.000, 0.167, + 0.000, 0.000, 0.333, + 0.000, 0.000, 0.500, + 0.000, 0.000, 0.667, + 0.000, 0.000, 0.833, + 0.000, 0.000, 1.000, + 0.000, 0.000, 0.000, + 0.143, 0.143, 0.143, + 0.857, 0.857, 0.857, + 1.000, 1.000, 1.000 + ] +).astype(np.float32).reshape(-1, 3) +# fmt: on + + +def colormap(rgb=False, maximum=255): + """ + Args: + rgb (bool): whether to return RGB colors or BGR colors. 
+ maximum (int): either 255 or 1 + + Returns: + ndarray: a float32 array of Nx3 colors, in range [0, 255] or [0, 1] + """ + assert maximum in [255, 1], maximum + c = _COLORS * maximum + if not rgb: + c = c[:, ::-1] + return c + + +def random_color(rgb=False, maximum=255): + """ + Args: + rgb (bool): whether to return RGB colors or BGR colors. + maximum (int): either 255 or 1 + + Returns: + ndarray: a vector of 3 numbers + """ + idx = np.random.randint(0, len(_COLORS)) + ret = _COLORS[idx] * maximum + if not rgb: + ret = ret[::-1] + return ret + + +if __name__ == "__main__": + import cv2 + + size = 100 + H, W = 10, 10 + canvas = np.random.rand(H * size, W * size, 3).astype("float32") + for h in range(H): + for w in range(W): + idx = h * W + w + if idx >= len(_COLORS): + break + canvas[h * size : (h + 1) * size, w * size : (w + 1) * size] = _COLORS[idx] + cv2.imshow("a", canvas) + cv2.waitKey(0) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/comm.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/comm.py new file mode 100644 index 0000000000000000000000000000000000000000..8cc7b3dac5a45db87fa91ac86fce50805ecf1bad --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/comm.py @@ -0,0 +1,263 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +This file contains primitives for multi-gpu communication. +This is useful when doing distributed training. +""" + +import functools +import logging +import numpy as np +import pickle +import torch +import torch.distributed as dist + +_LOCAL_PROCESS_GROUP = None +""" +A torch process group which only includes processes that on the same machine as the current process. +This variable is set when processes are spawned by `launch()` in "engine/launch.py". +""" + + +def get_world_size() -> int: + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank() -> int: + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + return dist.get_rank() + + +def get_local_rank() -> int: + """ + Returns: + The rank of the current process within the local (per-machine) process group. + """ + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + assert _LOCAL_PROCESS_GROUP is not None + return dist.get_rank(group=_LOCAL_PROCESS_GROUP) + + +def get_local_size() -> int: + """ + Returns: + The size of the per-machine process group, + i.e. the number of processes per machine. + """ + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size(group=_LOCAL_PROCESS_GROUP) + + +def is_main_process() -> bool: + return get_rank() == 0 + + +def synchronize(): + """ + Helper function to synchronize (barrier) among all processes when + using distributed training + """ + if not dist.is_available(): + return + if not dist.is_initialized(): + return + world_size = dist.get_world_size() + if world_size == 1: + return + dist.barrier() + + +@functools.lru_cache() +def _get_global_gloo_group(): + """ + Return a process group based on gloo backend, containing all the ranks + The result is cached. 
+ """ + if dist.get_backend() == "nccl": + return dist.new_group(backend="gloo") + else: + return dist.group.WORLD + + +def _serialize_to_tensor(data, group): + backend = dist.get_backend(group) + assert backend in ["gloo", "nccl"] + device = torch.device("cpu" if backend == "gloo" else "cuda") + + buffer = pickle.dumps(data) + if len(buffer) > 1024 ** 3: + logger = logging.getLogger(__name__) + logger.warning( + "Rank {} trying to all-gather {:.2f} GB of data on device {}".format( + get_rank(), len(buffer) / (1024 ** 3), device + ) + ) + storage = torch.ByteStorage.from_buffer(buffer) + tensor = torch.ByteTensor(storage).to(device=device) + return tensor + + +def _pad_to_largest_tensor(tensor, group): + """ + Returns: + list[int]: size of the tensor, on each rank + Tensor: padded tensor that has the max size + """ + world_size = dist.get_world_size(group=group) + assert ( + world_size >= 1 + ), "comm.gather/all_gather must be called from ranks within the given group!" + local_size = torch.tensor([tensor.numel()], dtype=torch.int64, device=tensor.device) + size_list = [ + torch.zeros([1], dtype=torch.int64, device=tensor.device) for _ in range(world_size) + ] + dist.all_gather(size_list, local_size, group=group) + size_list = [int(size.item()) for size in size_list] + + max_size = max(size_list) + + # we pad the tensor because torch all_gather does not support + # gathering tensors of different shapes + if local_size != max_size: + padding = torch.zeros((max_size - local_size,), dtype=torch.uint8, device=tensor.device) + tensor = torch.cat((tensor, padding), dim=0) + return size_list, tensor + + +def all_gather(data, group=None): + """ + Run all_gather on arbitrary picklable data (not necessarily tensors). + + Args: + data: any picklable object + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + + Returns: + list[data]: list of data gathered from each rank + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group) == 1: + return [data] + + tensor = _serialize_to_tensor(data, group) + + size_list, tensor = _pad_to_largest_tensor(tensor, group) + max_size = max(size_list) + + # receiving Tensor from all ranks + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list + ] + dist.all_gather(tensor_list, tensor, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + + return data_list + + +def gather(data, dst=0, group=None): + """ + Run gather on arbitrary picklable data (not necessarily tensors). + + Args: + data: any picklable object + dst (int): destination rank + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + + Returns: + list[data]: on dst, a list of data gathered from each rank. Otherwise, + an empty list. 
+ """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group=group) == 1: + return [data] + rank = dist.get_rank(group=group) + + tensor = _serialize_to_tensor(data, group) + size_list, tensor = _pad_to_largest_tensor(tensor, group) + + # receiving Tensor from all ranks + if rank == dst: + max_size = max(size_list) + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list + ] + dist.gather(tensor, tensor_list, dst=dst, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + return data_list + else: + dist.gather(tensor, [], dst=dst, group=group) + return [] + + +def shared_random_seed(): + """ + Returns: + int: a random number that is the same across all workers. + If workers need a shared RNG, they can use this shared seed to + create one. + + All workers must call this function, otherwise it will deadlock. + """ + ints = np.random.randint(2 ** 31) + all_ints = all_gather(ints) + return all_ints[0] + + +def reduce_dict(input_dict, average=True): + """ + Reduce the values in the dictionary from all processes so that process with rank + 0 has the reduced results. + + Args: + input_dict (dict): inputs to be reduced. All the values must be scalar CUDA Tensor. + average (bool): whether to do average or sum + + Returns: + a dict with the same keys as input_dict, after reduction. + """ + world_size = get_world_size() + if world_size < 2: + return input_dict + with torch.no_grad(): + names = [] + values = [] + # sort the keys so that they are consistent across processes + for k in sorted(input_dict.keys()): + names.append(k) + values.append(input_dict[k]) + values = torch.stack(values, dim=0) + dist.reduce(values, dst=0) + if dist.get_rank() == 0 and average: + # only main process gets accumulated, so only divide by + # world_size in this case + values /= world_size + reduced_dict = {k: v for k, v in zip(names, values)} + return reduced_dict diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/env.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/env.py new file mode 100644 index 0000000000000000000000000000000000000000..6769cae4cfb71ae05c605cb9e30eb12ee58c6ee7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/env.py @@ -0,0 +1,116 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import importlib +import importlib.util +import logging +import numpy as np +import os +import random +import sys +from datetime import datetime +import torch + +__all__ = ["seed_all_rng"] + + +def seed_all_rng(seed=None): + """ + Set the random seed for the RNG in torch, numpy and python. + + Args: + seed (int): if None, will use a strong random seed. 
+ """ + if seed is None: + seed = ( + os.getpid() + + int(datetime.now().strftime("%S%f")) + + int.from_bytes(os.urandom(2), "big") + ) + logger = logging.getLogger(__name__) + logger.info("Using a generated random seed {}".format(seed)) + np.random.seed(seed) + torch.set_rng_state(torch.manual_seed(seed).get_state()) + random.seed(seed) + + +# from https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path +def _import_file(module_name, file_path, make_importable=False): + spec = importlib.util.spec_from_file_location(module_name, file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + if make_importable: + sys.modules[module_name] = module + return module + + +def _configure_libraries(): + """ + Configurations for some libraries. + """ + # An environment option to disable `import cv2` globally, + # in case it leads to negative performance impact + disable_cv2 = int(os.environ.get("DETECTRON2_DISABLE_CV2", False)) + if disable_cv2: + sys.modules["cv2"] = None + else: + # Disable opencl in opencv since its interaction with cuda often has negative effects + # This envvar is supported after OpenCV 3.4.0 + os.environ["OPENCV_OPENCL_RUNTIME"] = "disabled" + try: + import cv2 + + if int(cv2.__version__.split(".")[0]) >= 3: + cv2.ocl.setUseOpenCL(False) + except ImportError: + pass + + def get_version(module, digit=2): + return tuple(map(int, module.__version__.split(".")[:digit])) + + # fmt: off + assert get_version(torch) >= (1, 4), "Requires torch>=1.4" + import fvcore + assert get_version(fvcore, 3) >= (0, 1, 1), "Requires fvcore>=0.1.1" + import yaml + assert get_version(yaml) >= (5, 1), "Requires pyyaml>=5.1" + # fmt: on + + +_ENV_SETUP_DONE = False + + +def setup_environment(): + """Perform environment setup work. The default setup is a no-op, but this + function allows the user to specify a Python source file or a module in + the $DETECTRON2_ENV_MODULE environment variable, that performs + custom setup work that may be necessary to their computing environment. + """ + global _ENV_SETUP_DONE + if _ENV_SETUP_DONE: + return + _ENV_SETUP_DONE = True + + _configure_libraries() + + custom_module_path = os.environ.get("DETECTRON2_ENV_MODULE") + + if custom_module_path: + setup_custom_environment(custom_module_path) + else: + # The default setup is a no-op + pass + + +def setup_custom_environment(custom_module): + """ + Load custom environment setup by importing a Python source file or a + module, and run the setup function. + """ + if custom_module.endswith(".py"): + module = _import_file("detectron2.utils.env.custom_module", custom_module) + else: + module = importlib.import_module(custom_module) + assert hasattr(module, "setup_environment") and callable(module.setup_environment), ( + "Custom environment module defined in {} does not have the " + "required callable attribute 'setup_environment'." + ).format(custom_module) + module.setup_environment() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/events.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/events.py new file mode 100644 index 0000000000000000000000000000000000000000..a3c57edb05016d2df041d756f59e90dfabddd718 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/events.py @@ -0,0 +1,432 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import datetime +import json +import logging +import os +import time +from collections import defaultdict +from contextlib import contextmanager +import torch +from fvcore.common.file_io import PathManager +from fvcore.common.history_buffer import HistoryBuffer + +_CURRENT_STORAGE_STACK = [] + + +def get_event_storage(): + """ + Returns: + The :class:`EventStorage` object that's currently being used. + Throws an error if no :class:`EventStorage` is currently enabled. + """ + assert len( + _CURRENT_STORAGE_STACK + ), "get_event_storage() has to be called inside a 'with EventStorage(...)' context!" + return _CURRENT_STORAGE_STACK[-1] + + +class EventWriter: + """ + Base class for writers that obtain events from :class:`EventStorage` and process them. + """ + + def write(self): + raise NotImplementedError + + def close(self): + pass + + +class JSONWriter(EventWriter): + """ + Write scalars to a json file. + + It saves scalars as one json per line (instead of a big json) for easy parsing. + + Examples parsing such a json file: + + .. code-block:: none + + $ cat metrics.json | jq -s '.[0:2]' + [ + { + "data_time": 0.008433341979980469, + "iteration": 20, + "loss": 1.9228371381759644, + "loss_box_reg": 0.050025828182697296, + "loss_classifier": 0.5316952466964722, + "loss_mask": 0.7236229181289673, + "loss_rpn_box": 0.0856662318110466, + "loss_rpn_cls": 0.48198649287223816, + "lr": 0.007173333333333333, + "time": 0.25401854515075684 + }, + { + "data_time": 0.007216215133666992, + "iteration": 40, + "loss": 1.282649278640747, + "loss_box_reg": 0.06222952902317047, + "loss_classifier": 0.30682939291000366, + "loss_mask": 0.6970193982124329, + "loss_rpn_box": 0.038663312792778015, + "loss_rpn_cls": 0.1471673548221588, + "lr": 0.007706666666666667, + "time": 0.2490077018737793 + } + ] + + $ cat metrics.json | jq '.loss_mask' + 0.7126231789588928 + 0.689423680305481 + 0.6776131987571716 + ... + + """ + + def __init__(self, json_file, window_size=20): + """ + Args: + json_file (str): path to the json file. New data will be appended if the file exists. + window_size (int): the window size of median smoothing for the scalars whose + `smoothing_hint` are True. + """ + self._file_handle = PathManager.open(json_file, "a") + self._window_size = window_size + + def write(self): + storage = get_event_storage() + to_save = {"iteration": storage.iter} + to_save.update(storage.latest_with_smoothing_hint(self._window_size)) + self._file_handle.write(json.dumps(to_save, sort_keys=True) + "\n") + self._file_handle.flush() + try: + os.fsync(self._file_handle.fileno()) + except AttributeError: + pass + + def close(self): + self._file_handle.close() + + +class TensorboardXWriter(EventWriter): + """ + Write all scalars to a tensorboard file. + """ + + def __init__(self, log_dir: str, window_size: int = 20, **kwargs): + """ + Args: + log_dir (str): the directory to save the output events + window_size (int): the scalars will be median-smoothed by this window size + + kwargs: other arguments passed to `torch.utils.tensorboard.SummaryWriter(...)` + """ + self._window_size = window_size + from torch.utils.tensorboard import SummaryWriter + + self._writer = SummaryWriter(log_dir, **kwargs) + + def write(self): + storage = get_event_storage() + for k, v in storage.latest_with_smoothing_hint(self._window_size).items(): + self._writer.add_scalar(k, v, storage.iter) + + # storage.put_{image,histogram} is only meant to be used by + # tensorboard writer. So we access its internal fields directly from here. 
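+        # Concretely: every (name, tensor, iteration) triple queued via
+        # storage.put_image() is forwarded to add_image() and the queue is cleared
+        # afterwards; every raw histogram queued via storage.put_histogram() is
+        # forwarded to add_histogram_raw() and then cleared as well.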
+ if len(storage._vis_data) >= 1: + for img_name, img, step_num in storage._vis_data: + self._writer.add_image(img_name, img, step_num) + # Storage stores all image data and rely on this writer to clear them. + # As a result it assumes only one writer will use its image data. + # An alternative design is to let storage store limited recent + # data (e.g. only the most recent image) that all writers can access. + # In that case a writer may not see all image data if its period is long. + storage.clear_images() + + if len(storage._histograms) >= 1: + for params in storage._histograms: + self._writer.add_histogram_raw(**params) + storage.clear_histograms() + + def close(self): + if hasattr(self, "_writer"): # doesn't exist when the code fails at import + self._writer.close() + + +class CommonMetricPrinter(EventWriter): + """ + Print **common** metrics to the terminal, including + iteration time, ETA, memory, all losses, and the learning rate. + + To print something different, please implement a similar printer by yourself. + """ + + def __init__(self, max_iter): + """ + Args: + max_iter (int): the maximum number of iterations to train. + Used to compute ETA. + """ + self.logger = logging.getLogger(__name__) + self._max_iter = max_iter + self._last_write = None + + def write(self): + storage = get_event_storage() + iteration = storage.iter + + try: + data_time = storage.history("data_time").avg(20) + except KeyError: + # they may not exist in the first few iterations (due to warmup) + # or when SimpleTrainer is not used + data_time = None + + eta_string = None + try: + iter_time = storage.history("time").global_avg() + eta_seconds = storage.history("time").median(1000) * (self._max_iter - iteration) + storage.put_scalar("eta_seconds", eta_seconds, smoothing_hint=False) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + except KeyError: + iter_time = None + # estimate eta on our own - more noisy + if self._last_write is not None: + estimate_iter_time = (time.perf_counter() - self._last_write[1]) / ( + iteration - self._last_write[0] + ) + eta_seconds = estimate_iter_time * (self._max_iter - iteration) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + self._last_write = (iteration, time.perf_counter()) + + try: + lr = "{:.6f}".format(storage.history("lr").latest()) + except KeyError: + lr = "N/A" + + if torch.cuda.is_available(): + max_mem_mb = torch.cuda.max_memory_allocated() / 1024.0 / 1024.0 + else: + max_mem_mb = None + + # NOTE: max_mem is parsed by grep in "dev/parse_results.sh" + self.logger.info( + " {eta}iter: {iter} {losses} {time}{data_time}lr: {lr} {memory}".format( + eta=f"eta: {eta_string} " if eta_string else "", + iter=iteration, + losses=" ".join( + [ + "{}: {:.3f}".format(k, v.median(20)) + for k, v in storage.histories().items() + if "loss" in k + ] + ), + time="time: {:.4f} ".format(iter_time) if iter_time is not None else "", + data_time="data_time: {:.4f} ".format(data_time) if data_time is not None else "", + lr=lr, + memory="max_mem: {:.0f}M".format(max_mem_mb) if max_mem_mb is not None else "", + ) + ) + + +class EventStorage: + """ + The user-facing class that provides metric storage functionalities. + + In the future we may add support for storing / logging other types of data if needed. 
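+
+    Illustrative usage (a minimal sketch; ``run_one_iteration`` and ``max_iter`` are
+    hypothetical stand-ins for the caller's own training loop):
+
+    .. code-block:: python
+
+        with EventStorage(start_iter=0) as storage:
+            for _ in range(max_iter):
+                loss = run_one_iteration()
+                storage.put_scalar("total_loss", loss)
+                storage.step()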
+ """ + + def __init__(self, start_iter=0): + """ + Args: + start_iter (int): the iteration number to start with + """ + self._history = defaultdict(HistoryBuffer) + self._smoothing_hints = {} + self._latest_scalars = {} + self._iter = start_iter + self._current_prefix = "" + self._vis_data = [] + self._histograms = [] + + def put_image(self, img_name, img_tensor): + """ + Add an `img_tensor` associated with `img_name`, to be shown on + tensorboard. + + Args: + img_name (str): The name of the image to put into tensorboard. + img_tensor (torch.Tensor or numpy.array): An `uint8` or `float` + Tensor of shape `[channel, height, width]` where `channel` is + 3. The image format should be RGB. The elements in img_tensor + can either have values in [0, 1] (float32) or [0, 255] (uint8). + The `img_tensor` will be visualized in tensorboard. + """ + self._vis_data.append((img_name, img_tensor, self._iter)) + + def put_scalar(self, name, value, smoothing_hint=True): + """ + Add a scalar `value` to the `HistoryBuffer` associated with `name`. + + Args: + smoothing_hint (bool): a 'hint' on whether this scalar is noisy and should be + smoothed when logged. The hint will be accessible through + :meth:`EventStorage.smoothing_hints`. A writer may ignore the hint + and apply custom smoothing rule. + + It defaults to True because most scalars we save need to be smoothed to + provide any useful signal. + """ + name = self._current_prefix + name + history = self._history[name] + value = float(value) + history.update(value, self._iter) + self._latest_scalars[name] = value + + existing_hint = self._smoothing_hints.get(name) + if existing_hint is not None: + assert ( + existing_hint == smoothing_hint + ), "Scalar {} was put with a different smoothing_hint!".format(name) + else: + self._smoothing_hints[name] = smoothing_hint + + def put_scalars(self, *, smoothing_hint=True, **kwargs): + """ + Put multiple scalars from keyword arguments. + + Examples: + + storage.put_scalars(loss=my_loss, accuracy=my_accuracy, smoothing_hint=True) + """ + for k, v in kwargs.items(): + self.put_scalar(k, v, smoothing_hint=smoothing_hint) + + def put_histogram(self, hist_name, hist_tensor, bins=1000): + """ + Create a histogram from a tensor. + + Args: + hist_name (str): The name of the histogram to put into tensorboard. + hist_tensor (torch.Tensor): A Tensor of arbitrary shape to be converted + into a histogram. + bins (int): Number of histogram bins. 
+ """ + ht_min, ht_max = hist_tensor.min().item(), hist_tensor.max().item() + + # Create a histogram with PyTorch + hist_counts = torch.histc(hist_tensor, bins=bins) + hist_edges = torch.linspace(start=ht_min, end=ht_max, steps=bins + 1, dtype=torch.float32) + + # Parameter for the add_histogram_raw function of SummaryWriter + hist_params = dict( + tag=hist_name, + min=ht_min, + max=ht_max, + num=len(hist_tensor), + sum=float(hist_tensor.sum()), + sum_squares=float(torch.sum(hist_tensor ** 2)), + bucket_limits=hist_edges[1:].tolist(), + bucket_counts=hist_counts.tolist(), + global_step=self._iter, + ) + self._histograms.append(hist_params) + + def history(self, name): + """ + Returns: + HistoryBuffer: the scalar history for name + """ + ret = self._history.get(name, None) + if ret is None: + raise KeyError("No history metric available for {}!".format(name)) + return ret + + def histories(self): + """ + Returns: + dict[name -> HistoryBuffer]: the HistoryBuffer for all scalars + """ + return self._history + + def latest(self): + """ + Returns: + dict[name -> number]: the scalars that's added in the current iteration. + """ + return self._latest_scalars + + def latest_with_smoothing_hint(self, window_size=20): + """ + Similar to :meth:`latest`, but the returned values + are either the un-smoothed original latest value, + or a median of the given window_size, + depend on whether the smoothing_hint is True. + + This provides a default behavior that other writers can use. + """ + result = {} + for k, v in self._latest_scalars.items(): + result[k] = self._history[k].median(window_size) if self._smoothing_hints[k] else v + return result + + def smoothing_hints(self): + """ + Returns: + dict[name -> bool]: the user-provided hint on whether the scalar + is noisy and needs smoothing. + """ + return self._smoothing_hints + + def step(self): + """ + User should call this function at the beginning of each iteration, to + notify the storage of the start of a new iteration. + The storage will then be able to associate the new data with the + correct iteration number. + """ + self._iter += 1 + self._latest_scalars = {} + + @property + def iter(self): + return self._iter + + @property + def iteration(self): + # for backward compatibility + return self._iter + + def __enter__(self): + _CURRENT_STORAGE_STACK.append(self) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + assert _CURRENT_STORAGE_STACK[-1] == self + _CURRENT_STORAGE_STACK.pop() + + @contextmanager + def name_scope(self, name): + """ + Yields: + A context within which all the events added to this storage + will be prefixed by the name scope. + """ + old_prefix = self._current_prefix + self._current_prefix = name.rstrip("/") + "/" + yield + self._current_prefix = old_prefix + + def clear_images(self): + """ + Delete all the stored images for visualization. This should be called + after images are written to tensorboard. + """ + self._vis_data = [] + + def clear_histograms(self): + """ + Delete all the stored histograms for visualization. + This should be called after histograms are written to tensorboard. 
+ """ + self._histograms = [] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/logger.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..b6496d9d6096f557ffa684be80342ec220c6014c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/logger.py @@ -0,0 +1,221 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import functools +import logging +import os +import sys +import time +from collections import Counter +from fvcore.common.file_io import PathManager +from tabulate import tabulate +from termcolor import colored + + +class _ColorfulFormatter(logging.Formatter): + def __init__(self, *args, **kwargs): + self._root_name = kwargs.pop("root_name") + "." + self._abbrev_name = kwargs.pop("abbrev_name", "") + if len(self._abbrev_name): + self._abbrev_name = self._abbrev_name + "." + super(_ColorfulFormatter, self).__init__(*args, **kwargs) + + def formatMessage(self, record): + record.name = record.name.replace(self._root_name, self._abbrev_name) + log = super(_ColorfulFormatter, self).formatMessage(record) + if record.levelno == logging.WARNING: + prefix = colored("WARNING", "red", attrs=["blink"]) + elif record.levelno == logging.ERROR or record.levelno == logging.CRITICAL: + prefix = colored("ERROR", "red", attrs=["blink", "underline"]) + else: + return log + return prefix + " " + log + + +@functools.lru_cache() # so that calling setup_logger multiple times won't add many handlers +def setup_logger( + output=None, distributed_rank=0, *, color=True, name="detectron2", abbrev_name=None +): + """ + Initialize the detectron2 logger and set its verbosity level to "DEBUG". + + Args: + output (str): a file name or a directory to save log. If None, will not save log file. + If ends with ".txt" or ".log", assumed to be a file name. + Otherwise, logs will be saved to `output/log.txt`. + name (str): the root module name of this logger + abbrev_name (str): an abbreviation of the module, to avoid long names in logs. + Set to "" to not log the root module in logs. + By default, will abbreviate "detectron2" to "d2" and leave other + modules unchanged. 
+ + Returns: + logging.Logger: a logger + """ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + logger.propagate = False + + if abbrev_name is None: + abbrev_name = "d2" if name == "detectron2" else name + + plain_formatter = logging.Formatter( + "[%(asctime)s] %(name)s %(levelname)s: %(message)s", datefmt="%m/%d %H:%M:%S" + ) + # stdout logging: master only + if distributed_rank == 0: + ch = logging.StreamHandler(stream=sys.stdout) + ch.setLevel(logging.DEBUG) + if color: + formatter = _ColorfulFormatter( + colored("[%(asctime)s %(name)s]: ", "green") + "%(message)s", + datefmt="%m/%d %H:%M:%S", + root_name=name, + abbrev_name=str(abbrev_name), + ) + else: + formatter = plain_formatter + ch.setFormatter(formatter) + logger.addHandler(ch) + + # file logging: all workers + if output is not None: + if output.endswith(".txt") or output.endswith(".log"): + filename = output + else: + filename = os.path.join(output, "log.txt") + if distributed_rank > 0: + filename = filename + ".rank{}".format(distributed_rank) + PathManager.mkdirs(os.path.dirname(filename)) + + fh = logging.StreamHandler(_cached_log_stream(filename)) + fh.setLevel(logging.DEBUG) + fh.setFormatter(plain_formatter) + logger.addHandler(fh) + + return logger + + +# cache the opened file object, so that different calls to `setup_logger` +# with the same file name can safely write to the same file. +@functools.lru_cache(maxsize=None) +def _cached_log_stream(filename): + return PathManager.open(filename, "a") + + +""" +Below are some other convenient logging methods. +They are mainly adopted from +https://github.com/abseil/abseil-py/blob/master/absl/logging/__init__.py +""" + + +def _find_caller(): + """ + Returns: + str: module name of the caller + tuple: a hashable key to be used to identify different callers + """ + frame = sys._getframe(2) + while frame: + code = frame.f_code + if os.path.join("utils", "logger.") not in code.co_filename: + mod_name = frame.f_globals["__name__"] + if mod_name == "__main__": + mod_name = "detectron2" + return mod_name, (code.co_filename, frame.f_lineno, code.co_name) + frame = frame.f_back + + +_LOG_COUNTER = Counter() +_LOG_TIMER = {} + + +def log_first_n(lvl, msg, n=1, *, name=None, key="caller"): + """ + Log only for the first n times. + + Args: + lvl (int): the logging level + msg (str): + n (int): + name (str): name of the logger to use. Will use the caller's module by default. + key (str or tuple[str]): the string(s) can be one of "caller" or + "message", which defines how to identify duplicated logs. + For example, if called with `n=1, key="caller"`, this function + will only log the first call from the same caller, regardless of + the message content. + If called with `n=1, key="message"`, this function will log the + same content only once, even if they are called from different places. + If called with `n=1, key=("caller", "message")`, this function + will not log only if the same caller has logged the same message before. + """ + if isinstance(key, str): + key = (key,) + assert len(key) > 0 + + caller_module, caller_key = _find_caller() + hash_key = () + if "caller" in key: + hash_key = hash_key + caller_key + if "message" in key: + hash_key = hash_key + (msg,) + + _LOG_COUNTER[hash_key] += 1 + if _LOG_COUNTER[hash_key] <= n: + logging.getLogger(name or caller_module).log(lvl, msg) + + +def log_every_n(lvl, msg, n=1, *, name=None): + """ + Log once per n times. + + Args: + lvl (int): the logging level + msg (str): + n (int): + name (str): name of the logger to use. 
Will use the caller's module by default. + """ + caller_module, key = _find_caller() + _LOG_COUNTER[key] += 1 + if n == 1 or _LOG_COUNTER[key] % n == 1: + logging.getLogger(name or caller_module).log(lvl, msg) + + +def log_every_n_seconds(lvl, msg, n=1, *, name=None): + """ + Log no more than once per n seconds. + + Args: + lvl (int): the logging level + msg (str): + n (int): + name (str): name of the logger to use. Will use the caller's module by default. + """ + caller_module, key = _find_caller() + last_logged = _LOG_TIMER.get(key, None) + current_time = time.time() + if last_logged is None or current_time - last_logged >= n: + logging.getLogger(name or caller_module).log(lvl, msg) + _LOG_TIMER[key] = current_time + + +def create_small_table(small_dict): + """ + Create a small table using the keys of small_dict as headers. This is only + suitable for small dictionaries. + + Args: + small_dict (dict): a result dictionary of only a few items. + + Returns: + str: the table as a string. + """ + keys, values = tuple(zip(*small_dict.items())) + table = tabulate( + [values], + headers=keys, + tablefmt="pipe", + floatfmt=".3f", + stralign="center", + numalign="center", + ) + return table diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/memory.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/memory.py new file mode 100644 index 0000000000000000000000000000000000000000..d495a1681f460668c96f64454e31e7f2fca8137a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/memory.py @@ -0,0 +1,86 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import logging +from contextlib import contextmanager +from functools import wraps +import torch + +__all__ = ["retry_if_cuda_oom"] + + +@contextmanager +def _ignore_torch_cuda_oom(): + """ + A context which ignores CUDA OOM exception from pytorch. + """ + try: + yield + except RuntimeError as e: + # NOTE: the string may change? + if "CUDA out of memory. " in str(e): + pass + else: + raise + + +def retry_if_cuda_oom(func): + """ + Makes a function retry itself after encountering + pytorch's CUDA OOM error. + It will first retry after calling `torch.cuda.empty_cache()`. + + If that still fails, it will then retry by trying to convert inputs to CPUs. + In this case, it expects the function to dispatch to CPU implementation. + The return values may become CPU tensors as well and it's user's + responsibility to convert it back to CUDA tensor if needed. + + Args: + func: a stateless callable that takes tensor-like objects as arguments + + Returns: + a callable which retries `func` if OOM is encountered. + + Examples: + + .. code-block:: python + + output = retry_if_cuda_oom(some_torch_function)(input1, input2) + # output may be on CPU even if inputs are on GPU + + Note: + 1. When converting inputs to CPU, it will only look at each argument and check + if it has `.device` and `.to` for conversion. Nested structures of tensors + are not supported. + + 2. Since the function might be called more than once, it has to be + stateless. 
+ """ + + def maybe_to_cpu(x): + try: + like_gpu_tensor = x.device.type == "cuda" and hasattr(x, "to") + except AttributeError: + like_gpu_tensor = False + if like_gpu_tensor: + return x.to(device="cpu") + else: + return x + + @wraps(func) + def wrapped(*args, **kwargs): + with _ignore_torch_cuda_oom(): + return func(*args, **kwargs) + + # Clear cache and retry + torch.cuda.empty_cache() + with _ignore_torch_cuda_oom(): + return func(*args, **kwargs) + + # Try on CPU. This slows down the code significantly, therefore print a notice. + logger = logging.getLogger(__name__) + logger.info("Attempting to copy inputs of {} to CPU due to CUDA OOM".format(str(func))) + new_args = (maybe_to_cpu(x) for x in args) + new_kwargs = {k: maybe_to_cpu(v) for k, v in kwargs.items()} + return func(*new_args, **new_kwargs) + + return wrapped diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/registry.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..fea1de961f0dbdacc934e11b9af5647b2a008051 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/registry.py @@ -0,0 +1,6 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +# Keep this module for backward compatibility. +from fvcore.common.registry import Registry # noqa + +__all__ = ["Registry"] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/serialize.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/serialize.py new file mode 100644 index 0000000000000000000000000000000000000000..734a62c2c4ecfd520eb9e8b941857b6f7e17d4c8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/serialize.py @@ -0,0 +1,29 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import cloudpickle + + +class PicklableWrapper(object): + """ + Wrap an object to make it more picklable, note that it uses + heavy weight serialization libraries that are slower than pickle. + It's best to use it only on closures (which are usually not picklable). + + This is a simplified version of + https://github.com/joblib/joblib/blob/master/joblib/externals/loky/cloudpickle_wrapper.py + """ + + def __init__(self, obj): + self._obj = obj + + def __reduce__(self): + s = cloudpickle.dumps(self._obj) + return cloudpickle.loads, (s,) + + def __call__(self, *args, **kwargs): + return self._obj(*args, **kwargs) + + def __getattr__(self, attr): + # Ensure that the wrapped object can be used seamlessly as the previous object. + if attr not in ["_obj"]: + return getattr(self._obj, attr) + return getattr(self, attr) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/video_visualizer.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/video_visualizer.py new file mode 100644 index 0000000000000000000000000000000000000000..0144b679d09bbb8049c30eb849099422355b492c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/video_visualizer.py @@ -0,0 +1,235 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import numpy as np +import pycocotools.mask as mask_util + +from detectron2.utils.visualizer import ( + ColorMode, + Visualizer, + _create_text_labels, + _PanopticPrediction, +) + +from .colormap import random_color + + +class _DetectedInstance: + """ + Used to store data about detected objects in video frame, + in order to transfer color to objects in the future frames. + + Attributes: + label (int): + bbox (tuple[float]): + mask_rle (dict): + color (tuple[float]): RGB colors in range (0, 1) + ttl (int): time-to-live for the instance. For example, if ttl=2, + the instance color can be transferred to objects in the next two frames. + """ + + __slots__ = ["label", "bbox", "mask_rle", "color", "ttl"] + + def __init__(self, label, bbox, mask_rle, color, ttl): + self.label = label + self.bbox = bbox + self.mask_rle = mask_rle + self.color = color + self.ttl = ttl + + +class VideoVisualizer: + def __init__(self, metadata, instance_mode=ColorMode.IMAGE): + """ + Args: + metadata (MetadataCatalog): image metadata. + """ + self.metadata = metadata + self._old_instances = [] + assert instance_mode in [ + ColorMode.IMAGE, + ColorMode.IMAGE_BW, + ], "Other mode not supported yet." + self._instance_mode = instance_mode + + def draw_instance_predictions(self, frame, predictions): + """ + Draw instance-level prediction results on an image. + + Args: + frame (ndarray): an RGB image of shape (H, W, C), in the range [0, 255]. + predictions (Instances): the output of an instance detection/segmentation + model. Following fields will be used to draw: + "pred_boxes", "pred_classes", "scores", "pred_masks" (or "pred_masks_rle"). + + Returns: + output (VisImage): image object with visualizations. + """ + frame_visualizer = Visualizer(frame, self.metadata) + num_instances = len(predictions) + if num_instances == 0: + return frame_visualizer.output + + boxes = predictions.pred_boxes.tensor.numpy() if predictions.has("pred_boxes") else None + scores = predictions.scores if predictions.has("scores") else None + classes = predictions.pred_classes.numpy() if predictions.has("pred_classes") else None + keypoints = predictions.pred_keypoints if predictions.has("pred_keypoints") else None + + if predictions.has("pred_masks"): + masks = predictions.pred_masks + # mask IOU is not yet enabled + # masks_rles = mask_util.encode(np.asarray(masks.permute(1, 2, 0), order="F")) + # assert len(masks_rles) == num_instances + else: + masks = None + + detected = [ + _DetectedInstance(classes[i], boxes[i], mask_rle=None, color=None, ttl=8) + for i in range(num_instances) + ] + colors = self._assign_colors(detected) + + labels = _create_text_labels(classes, scores, self.metadata.get("thing_classes", None)) + + if self._instance_mode == ColorMode.IMAGE_BW: + # any() returns uint8 tensor + frame_visualizer.output.img = frame_visualizer._create_grayscale_image( + (masks.any(dim=0) > 0).numpy() if masks is not None else None + ) + alpha = 0.3 + else: + alpha = 0.5 + + frame_visualizer.overlay_instances( + boxes=None if masks is not None else boxes, # boxes are a bit distracting + masks=masks, + labels=labels, + keypoints=keypoints, + assigned_colors=colors, + alpha=alpha, + ) + + return frame_visualizer.output + + def draw_sem_seg(self, frame, sem_seg, area_threshold=None): + """ + Args: + sem_seg (ndarray or Tensor): semantic segmentation of shape (H, W), + each value is the integer label. 
+ area_threshold (Optional[int]): only draw segmentations larger than the threshold + """ + # don't need to do anything special + frame_visualizer = Visualizer(frame, self.metadata) + frame_visualizer.draw_sem_seg(sem_seg, area_threshold=None) + return frame_visualizer.output + + def draw_panoptic_seg_predictions( + self, frame, panoptic_seg, segments_info, area_threshold=None, alpha=0.5 + ): + frame_visualizer = Visualizer(frame, self.metadata) + pred = _PanopticPrediction(panoptic_seg, segments_info) + + if self._instance_mode == ColorMode.IMAGE_BW: + frame_visualizer.output.img = frame_visualizer._create_grayscale_image( + pred.non_empty_mask() + ) + + # draw mask for all semantic segments first i.e. "stuff" + for mask, sinfo in pred.semantic_masks(): + category_idx = sinfo["category_id"] + try: + mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]] + except AttributeError: + mask_color = None + + frame_visualizer.draw_binary_mask( + mask, + color=mask_color, + text=self.metadata.stuff_classes[category_idx], + alpha=alpha, + area_threshold=area_threshold, + ) + + all_instances = list(pred.instance_masks()) + if len(all_instances) == 0: + return frame_visualizer.output + # draw mask for all instances second + masks, sinfo = list(zip(*all_instances)) + num_instances = len(masks) + masks_rles = mask_util.encode( + np.asarray(np.asarray(masks).transpose(1, 2, 0), dtype=np.uint8, order="F") + ) + assert len(masks_rles) == num_instances + + category_ids = [x["category_id"] for x in sinfo] + detected = [ + _DetectedInstance(category_ids[i], bbox=None, mask_rle=masks_rles[i], color=None, ttl=8) + for i in range(num_instances) + ] + colors = self._assign_colors(detected) + labels = [self.metadata.thing_classes[k] for k in category_ids] + + frame_visualizer.overlay_instances( + boxes=None, + masks=masks, + labels=labels, + keypoints=None, + assigned_colors=colors, + alpha=alpha, + ) + return frame_visualizer.output + + def _assign_colors(self, instances): + """ + Naive tracking heuristics to assign same color to the same instance, + will update the internal state of tracked instances. + + Returns: + list[tuple[float]]: list of colors. 
+ """ + + # Compute iou with either boxes or masks: + is_crowd = np.zeros((len(instances),), dtype=np.bool) + if instances[0].bbox is None: + assert instances[0].mask_rle is not None + # use mask iou only when box iou is None + # because box seems good enough + rles_old = [x.mask_rle for x in self._old_instances] + rles_new = [x.mask_rle for x in instances] + ious = mask_util.iou(rles_old, rles_new, is_crowd) + threshold = 0.5 + else: + boxes_old = [x.bbox for x in self._old_instances] + boxes_new = [x.bbox for x in instances] + ious = mask_util.iou(boxes_old, boxes_new, is_crowd) + threshold = 0.6 + if len(ious) == 0: + ious = np.zeros((len(self._old_instances), len(instances)), dtype="float32") + + # Only allow matching instances of the same label: + for old_idx, old in enumerate(self._old_instances): + for new_idx, new in enumerate(instances): + if old.label != new.label: + ious[old_idx, new_idx] = 0 + + matched_new_per_old = np.asarray(ious).argmax(axis=1) + max_iou_per_old = np.asarray(ious).max(axis=1) + + # Try to find match for each old instance: + extra_instances = [] + for idx, inst in enumerate(self._old_instances): + if max_iou_per_old[idx] > threshold: + newidx = matched_new_per_old[idx] + if instances[newidx].color is None: + instances[newidx].color = inst.color + continue + # If an old instance does not match any new instances, + # keep it for the next frame in case it is just missed by the detector + inst.ttl -= 1 + if inst.ttl > 0: + extra_instances.append(inst) + + # Assign random color to newly-detected instances: + for inst in instances: + if inst.color is None: + inst.color = random_color(rgb=True, maximum=1) + self._old_instances = instances[:] + extra_instances + return [d.color for d in instances] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/visualizer.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/visualizer.py new file mode 100644 index 0000000000000000000000000000000000000000..3ffcbdbd19518bce877a776582a7caeddc18108e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/detectron2/utils/visualizer.py @@ -0,0 +1,1143 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import colorsys +import logging +import math +import numpy as np +from enum import Enum, unique +import cv2 +import matplotlib as mpl +import matplotlib.colors as mplc +import matplotlib.figure as mplfigure +import pycocotools.mask as mask_util +import torch +from fvcore.common.file_io import PathManager +from matplotlib.backends.backend_agg import FigureCanvasAgg +from PIL import Image + +from detectron2.structures import BitMasks, Boxes, BoxMode, Keypoints, PolygonMasks, RotatedBoxes + +from .colormap import random_color + +logger = logging.getLogger(__name__) + +__all__ = ["ColorMode", "VisImage", "Visualizer"] + + +_SMALL_OBJECT_AREA_THRESH = 1000 +_LARGE_MASK_AREA_THRESH = 120000 +_OFF_WHITE = (1.0, 1.0, 240.0 / 255) +_BLACK = (0, 0, 0) +_RED = (1.0, 0, 0) + +_KEYPOINT_THRESHOLD = 0.05 + + +@unique +class ColorMode(Enum): + """ + Enum of different color modes to use for instance visualizations. + """ + + IMAGE = 0 + """ + Picks a random color for every instance and overlay segmentations with low opacity. + """ + SEGMENTATION = 1 + """ + Let instances of the same category have similar colors + (from metadata.thing_colors), and overlay them with + high opacity. This provides more attention on the quality of segmentation. 
+ """ + IMAGE_BW = 2 + """ + Same as IMAGE, but convert all areas without masks to gray-scale. + Only available for drawing per-instance mask predictions. + """ + + +class GenericMask: + """ + Attribute: + polygons (list[ndarray]): list[ndarray]: polygons for this mask. + Each ndarray has format [x, y, x, y, ...] + mask (ndarray): a binary mask + """ + + def __init__(self, mask_or_polygons, height, width): + self._mask = self._polygons = self._has_holes = None + self.height = height + self.width = width + + m = mask_or_polygons + if isinstance(m, dict): + # RLEs + assert "counts" in m and "size" in m + if isinstance(m["counts"], list): # uncompressed RLEs + h, w = m["size"] + assert h == height and w == width + m = mask_util.frPyObjects(m, h, w) + self._mask = mask_util.decode(m)[:, :] + return + + if isinstance(m, list): # list[ndarray] + self._polygons = [np.asarray(x).reshape(-1) for x in m] + return + + if isinstance(m, np.ndarray): # assumed to be a binary mask + assert m.shape[1] != 2, m.shape + assert m.shape == (height, width), m.shape + self._mask = m.astype("uint8") + return + + raise ValueError("GenericMask cannot handle object {} of type '{}'".format(m, type(m))) + + @property + def mask(self): + if self._mask is None: + self._mask = self.polygons_to_mask(self._polygons) + return self._mask + + @property + def polygons(self): + if self._polygons is None: + self._polygons, self._has_holes = self.mask_to_polygons(self._mask) + return self._polygons + + @property + def has_holes(self): + if self._has_holes is None: + if self._mask is not None: + self._polygons, self._has_holes = self.mask_to_polygons(self._mask) + else: + self._has_holes = False # if original format is polygon, does not have holes + return self._has_holes + + def mask_to_polygons(self, mask): + # cv2.RETR_CCOMP flag retrieves all the contours and arranges them to a 2-level + # hierarchy. External contours (boundary) of the object are placed in hierarchy-1. + # Internal contours (holes) are placed in hierarchy-2. + # cv2.CHAIN_APPROX_NONE flag gets vertices of polygons from contours. 
+ mask = np.ascontiguousarray(mask) # some versions of cv2 does not support incontiguous arr + res = cv2.findContours(mask.astype("uint8"), cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE) + hierarchy = res[-1] + if hierarchy is None: # empty mask + return [], False + has_holes = (hierarchy.reshape(-1, 4)[:, 3] >= 0).sum() > 0 + res = res[-2] + res = [x.flatten() for x in res] + res = [x for x in res if len(x) >= 6] + return res, has_holes + + def polygons_to_mask(self, polygons): + rle = mask_util.frPyObjects(polygons, self.height, self.width) + rle = mask_util.merge(rle) + return mask_util.decode(rle)[:, :] + + def area(self): + return self.mask.sum() + + def bbox(self): + p = mask_util.frPyObjects(self.polygons, self.height, self.width) + p = mask_util.merge(p) + bbox = mask_util.toBbox(p) + bbox[2] += bbox[0] + bbox[3] += bbox[1] + return bbox + + +class _PanopticPrediction: + def __init__(self, panoptic_seg, segments_info): + self._seg = panoptic_seg + + self._sinfo = {s["id"]: s for s in segments_info} # seg id -> seg info + segment_ids, areas = torch.unique(panoptic_seg, sorted=True, return_counts=True) + areas = areas.numpy() + sorted_idxs = np.argsort(-areas) + self._seg_ids, self._seg_areas = segment_ids[sorted_idxs], areas[sorted_idxs] + self._seg_ids = self._seg_ids.tolist() + for sid, area in zip(self._seg_ids, self._seg_areas): + if sid in self._sinfo: + self._sinfo[sid]["area"] = float(area) + + def non_empty_mask(self): + """ + Returns: + (H, W) array, a mask for all pixels that have a prediction + """ + empty_ids = [] + for id in self._seg_ids: + if id not in self._sinfo: + empty_ids.append(id) + if len(empty_ids) == 0: + return np.zeros(self._seg.shape, dtype=np.uint8) + assert ( + len(empty_ids) == 1 + ), ">1 ids corresponds to no labels. This is currently not supported" + return (self._seg != empty_ids[0]).numpy().astype(np.bool) + + def semantic_masks(self): + for sid in self._seg_ids: + sinfo = self._sinfo.get(sid) + if sinfo is None or sinfo["isthing"]: + # Some pixels (e.g. id 0 in PanopticFPN) have no instance or semantic predictions. + continue + yield (self._seg == sid).numpy().astype(np.bool), sinfo + + def instance_masks(self): + for sid in self._seg_ids: + sinfo = self._sinfo.get(sid) + if sinfo is None or not sinfo["isthing"]: + continue + mask = (self._seg == sid).numpy().astype(np.bool) + if mask.sum() > 0: + yield mask, sinfo + + +def _create_text_labels(classes, scores, class_names): + """ + Args: + classes (list[int] or None): + scores (list[float] or None): + class_names (list[str] or None): + + Returns: + list[str] or None + """ + labels = None + if classes is not None and class_names is not None and len(class_names) > 1: + labels = [class_names[i] for i in classes] + if scores is not None: + if labels is None: + labels = ["{:.0f}%".format(s * 100) for s in scores] + else: + labels = ["{} {:.0f}%".format(l, s * 100) for l, s in zip(labels, scores)] + return labels + + +class VisImage: + def __init__(self, img, scale=1.0): + """ + Args: + img (ndarray): an RGB image of shape (H, W, 3). + scale (float): scale the input image + """ + self.img = img + self.scale = scale + self.width, self.height = img.shape[1], img.shape[0] + self._setup_figure(img) + + def _setup_figure(self, img): + """ + Args: + Same as in :meth:`__init__()`. + + Returns: + fig (matplotlib.pyplot.figure): top level container for all the image plot elements. + ax (matplotlib.pyplot.Axes): contains figure elements and sets the coordinate system. 
+ """ + fig = mplfigure.Figure(frameon=False) + self.dpi = fig.get_dpi() + # add a small 1e-2 to avoid precision lost due to matplotlib's truncation + # (https://github.com/matplotlib/matplotlib/issues/15363) + fig.set_size_inches( + (self.width * self.scale + 1e-2) / self.dpi, + (self.height * self.scale + 1e-2) / self.dpi, + ) + self.canvas = FigureCanvasAgg(fig) + # self.canvas = mpl.backends.backend_cairo.FigureCanvasCairo(fig) + ax = fig.add_axes([0.0, 0.0, 1.0, 1.0]) + ax.axis("off") + ax.set_xlim(0.0, self.width) + ax.set_ylim(self.height) + + self.fig = fig + self.ax = ax + + def save(self, filepath): + """ + Args: + filepath (str): a string that contains the absolute path, including the file name, where + the visualized image will be saved. + """ + if filepath.lower().endswith(".jpg") or filepath.lower().endswith(".png"): + # faster than matplotlib's imshow + cv2.imwrite(filepath, self.get_image()[:, :, ::-1]) + else: + # support general formats (e.g. pdf) + self.ax.imshow(self.img, interpolation="nearest") + self.fig.savefig(filepath) + + def get_image(self): + """ + Returns: + ndarray: + the visualized image of shape (H, W, 3) (RGB) in uint8 type. + The shape is scaled w.r.t the input image using the given `scale` argument. + """ + canvas = self.canvas + s, (width, height) = canvas.print_to_buffer() + if (self.width, self.height) != (width, height): + img = cv2.resize(self.img, (width, height)) + else: + img = self.img + + # buf = io.BytesIO() # works for cairo backend + # canvas.print_rgba(buf) + # width, height = self.width, self.height + # s = buf.getvalue() + + buffer = np.frombuffer(s, dtype="uint8") + + # imshow is slow. blend manually (still quite slow) + img_rgba = buffer.reshape(height, width, 4) + rgb, alpha = np.split(img_rgba, [3], axis=2) + + try: + import numexpr as ne # fuse them with numexpr + + visualized_image = ne.evaluate("demo * (1 - alpha / 255.0) + rgb * (alpha / 255.0)") + except ImportError: + alpha = alpha.astype("float32") / 255.0 + visualized_image = img * (1 - alpha) + rgb * alpha + + visualized_image = visualized_image.astype("uint8") + + return visualized_image + + +class Visualizer: + def __init__(self, img_rgb, metadata, scale=1.0, instance_mode=ColorMode.IMAGE): + """ + Args: + img_rgb: a numpy array of shape (H, W, C), where H and W correspond to + the height and width of the image respectively. C is the number of + color channels. The image is required to be in RGB format since that + is a requirement of the Matplotlib library. The image is also expected + to be in the range [0, 255]. + metadata (MetadataCatalog): image metadata. + """ + self.img = np.asarray(img_rgb).clip(0, 255).astype(np.uint8) + self.metadata = metadata + self.output = VisImage(self.img, scale=scale) + self.cpu_device = torch.device("cpu") + + # too small texts are useless, therefore clamp to 9 + self._default_font_size = max( + np.sqrt(self.output.height * self.output.width) // 90, 10 // scale + ) + self._instance_mode = instance_mode + + def draw_instance_predictions(self, predictions): + """ + Draw instance-level prediction results on an image. + + Args: + predictions (Instances): the output of an instance detection/segmentation + model. Following fields will be used to draw: + "pred_boxes", "pred_classes", "scores", "pred_masks" (or "pred_masks_rle"). + + Returns: + output (VisImage): image object with visualizations. 
+ """ + boxes = predictions.pred_boxes if predictions.has("pred_boxes") else None + scores = predictions.scores if predictions.has("scores") else None + classes = predictions.pred_classes if predictions.has("pred_classes") else None + labels = _create_text_labels(classes, scores, self.metadata.get("thing_classes", None)) + keypoints = predictions.pred_keypoints if predictions.has("pred_keypoints") else None + + if predictions.has("pred_masks"): + masks = np.asarray(predictions.pred_masks) + masks = [GenericMask(x, self.output.height, self.output.width) for x in masks] + else: + masks = None + + if self._instance_mode == ColorMode.SEGMENTATION and self.metadata.get("thing_colors"): + colors = [ + self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) for c in classes + ] + alpha = 0.8 + else: + colors = None + alpha = 0.5 + + if self._instance_mode == ColorMode.IMAGE_BW: + self.output.img = self._create_grayscale_image( + (predictions.pred_masks.any(dim=0) > 0).numpy() + ) + alpha = 0.3 + + self.overlay_instances( + masks=masks, + boxes=boxes, + labels=labels, + keypoints=keypoints, + assigned_colors=colors, + alpha=alpha, + ) + return self.output + + def draw_sem_seg(self, sem_seg, area_threshold=None, alpha=0.8): + """ + Draw semantic segmentation predictions/labels. + + Args: + sem_seg (Tensor or ndarray): the segmentation of shape (H, W). + Each value is the integer label of the pixel. + area_threshold (int): segments with less than `area_threshold` are not drawn. + alpha (float): the larger it is, the more opaque the segmentations are. + + Returns: + output (VisImage): image object with visualizations. + """ + if isinstance(sem_seg, torch.Tensor): + sem_seg = sem_seg.numpy() + labels, areas = np.unique(sem_seg, return_counts=True) + sorted_idxs = np.argsort(-areas).tolist() + labels = labels[sorted_idxs] + for label in filter(lambda l: l < len(self.metadata.stuff_classes), labels): + try: + mask_color = [x / 255 for x in self.metadata.stuff_colors[label]] + except (AttributeError, IndexError): + mask_color = None + + binary_mask = (sem_seg == label).astype(np.uint8) + text = self.metadata.stuff_classes[label] + self.draw_binary_mask( + binary_mask, + color=mask_color, + edge_color=_OFF_WHITE, + text=text, + alpha=alpha, + area_threshold=area_threshold, + ) + return self.output + + def draw_panoptic_seg_predictions( + self, panoptic_seg, segments_info, area_threshold=None, alpha=0.7 + ): + """ + Draw panoptic prediction results on an image. + + Args: + panoptic_seg (Tensor): of shape (height, width) where the values are ids for each + segment. + segments_info (list[dict]): Describe each segment in `panoptic_seg`. + Each dict contains keys "id", "category_id", "isthing". + area_threshold (int): stuff segments with less than `area_threshold` are not drawn. + + Returns: + output (VisImage): image object with visualizations. + """ + pred = _PanopticPrediction(panoptic_seg, segments_info) + + if self._instance_mode == ColorMode.IMAGE_BW: + self.output.img = self._create_grayscale_image(pred.non_empty_mask()) + + # draw mask for all semantic segments first i.e. 
"stuff" + for mask, sinfo in pred.semantic_masks(): + category_idx = sinfo["category_id"] + try: + mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]] + except AttributeError: + mask_color = None + + text = self.metadata.stuff_classes[category_idx] + self.draw_binary_mask( + mask, + color=mask_color, + edge_color=_OFF_WHITE, + text=text, + alpha=alpha, + area_threshold=area_threshold, + ) + + # draw mask for all instances second + all_instances = list(pred.instance_masks()) + if len(all_instances) == 0: + return self.output + masks, sinfo = list(zip(*all_instances)) + category_ids = [x["category_id"] for x in sinfo] + + try: + scores = [x["score"] for x in sinfo] + except KeyError: + scores = None + labels = _create_text_labels(category_ids, scores, self.metadata.thing_classes) + + try: + colors = [random_color(rgb=True, maximum=1) for k in category_ids] + except AttributeError: + colors = None + self.overlay_instances(masks=masks, labels=labels, assigned_colors=colors, alpha=alpha) + + return self.output + + def draw_dataset_dict(self, dic): + """ + Draw annotations/segmentaions in Detectron2 Dataset format. + + Args: + dic (dict): annotation/segmentation data of one image, in Detectron2 Dataset format. + + Returns: + output (VisImage): image object with visualizations. + """ + annos = dic.get("annotations", None) + if annos: + if "segmentation" in annos[0]: + masks = [x["segmentation"] for x in annos] + else: + masks = None + if "keypoints" in annos[0]: + keypts = [x["keypoints"] for x in annos] + keypts = np.array(keypts).reshape(len(annos), -1, 3) + else: + keypts = None + + boxes = [BoxMode.convert(x["bbox"], x["bbox_mode"], BoxMode.XYXY_ABS) for x in annos] + + labels = [x["category_id"] for x in annos] + colors = None + if self._instance_mode == ColorMode.SEGMENTATION and self.metadata.get("thing_colors"): + colors = [ + self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) for c in labels + ] + names = self.metadata.get("thing_classes", None) + if names: + labels = [names[i] for i in labels] + labels = [ + "{}".format(i) + ("|crowd" if a.get("iscrowd", 0) else "") + for i, a in zip(labels, annos) + ] + self.overlay_instances( + labels=labels, boxes=boxes, masks=masks, keypoints=keypts, assigned_colors=colors + ) + + sem_seg = dic.get("sem_seg", None) + if sem_seg is None and "sem_seg_file_name" in dic: + with PathManager.open(dic["sem_seg_file_name"], "rb") as f: + sem_seg = Image.open(f) + sem_seg = np.asarray(sem_seg, dtype="uint8") + if sem_seg is not None: + self.draw_sem_seg(sem_seg, area_threshold=0, alpha=0.5) + return self.output + + def overlay_instances( + self, + *, + boxes=None, + labels=None, + masks=None, + keypoints=None, + assigned_colors=None, + alpha=0.5 + ): + """ + Args: + boxes (Boxes, RotatedBoxes or ndarray): either a :class:`Boxes`, + or an Nx4 numpy array of XYXY_ABS format for the N objects in a single image, + or a :class:`RotatedBoxes`, + or an Nx5 numpy array of (x_center, y_center, width, height, angle_degrees) format + for the N objects in a single image, + labels (list[str]): the text to be displayed for each instance. + masks (masks-like object): Supported types are: + + * :class:`detectron2.structures.PolygonMasks`, + :class:`detectron2.structures.BitMasks`. + * list[list[ndarray]]: contains the segmentation masks for all objects in one image. + The first level of the list corresponds to individual instances. 
The second + level to all the polygon that compose the instance, and the third level + to the polygon coordinates. The third level should have the format of + [x0, y0, x1, y1, ..., xn, yn] (n >= 3). + * list[ndarray]: each ndarray is a binary mask of shape (H, W). + * list[dict]: each dict is a COCO-style RLE. + keypoints (Keypoint or array like): an array-like object of shape (N, K, 3), + where the N is the number of instances and K is the number of keypoints. + The last dimension corresponds to (x, y, visibility or score). + assigned_colors (list[matplotlib.colors]): a list of colors, where each color + corresponds to each mask or box in the image. Refer to 'matplotlib.colors' + for full list of formats that the colors are accepted in. + + Returns: + output (VisImage): image object with visualizations. + """ + num_instances = None + if boxes is not None: + boxes = self._convert_boxes(boxes) + num_instances = len(boxes) + if masks is not None: + masks = self._convert_masks(masks) + if num_instances: + assert len(masks) == num_instances + else: + num_instances = len(masks) + if keypoints is not None: + if num_instances: + assert len(keypoints) == num_instances + else: + num_instances = len(keypoints) + keypoints = self._convert_keypoints(keypoints) + if labels is not None: + assert len(labels) == num_instances + if assigned_colors is None: + assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)] + if num_instances == 0: + return self.output + if boxes is not None and boxes.shape[1] == 5: + return self.overlay_rotated_instances( + boxes=boxes, labels=labels, assigned_colors=assigned_colors + ) + + # Display in largest to smallest order to reduce occlusion. + areas = None + if boxes is not None: + areas = np.prod(boxes[:, 2:] - boxes[:, :2], axis=1) + elif masks is not None: + areas = np.asarray([x.area() for x in masks]) + + if areas is not None: + sorted_idxs = np.argsort(-areas).tolist() + # Re-order overlapped instances in descending order. + boxes = boxes[sorted_idxs] if boxes is not None else None + labels = [labels[k] for k in sorted_idxs] if labels is not None else None + masks = [masks[idx] for idx in sorted_idxs] if masks is not None else None + assigned_colors = [assigned_colors[idx] for idx in sorted_idxs] + keypoints = keypoints[sorted_idxs] if keypoints is not None else None + + for i in range(num_instances): + color = assigned_colors[i] + if boxes is not None: + self.draw_box(boxes[i], edge_color=color) + + if masks is not None: + for segment in masks[i].polygons: + self.draw_polygon(segment.reshape(-1, 2), color, alpha=alpha) + + if labels is not None: + # first get a box + if boxes is not None: + x0, y0, x1, y1 = boxes[i] + text_pos = (x0, y0) # if drawing boxes, put text on the box corner. + horiz_align = "left" + elif masks is not None: + x0, y0, x1, y1 = masks[i].bbox() + + # draw text in the center (defined by median) when box is not drawn + # median is less sensitive to outliers. + text_pos = np.median(masks[i].mask.nonzero(), axis=1)[::-1] + horiz_align = "center" + else: + continue # drawing the box confidence for keypoints isn't very useful. 
+ # for small objects, draw text at the side to avoid occlusion + instance_area = (y1 - y0) * (x1 - x0) + if ( + instance_area < _SMALL_OBJECT_AREA_THRESH * self.output.scale + or y1 - y0 < 40 * self.output.scale + ): + if y1 >= self.output.height - 5: + text_pos = (x1, y0) + else: + text_pos = (x0, y1) + + height_ratio = (y1 - y0) / np.sqrt(self.output.height * self.output.width) + lighter_color = self._change_color_brightness(color, brightness_factor=0.7) + font_size = ( + np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2) + * 0.5 + * self._default_font_size + ) + self.draw_text( + labels[i], + text_pos, + color=lighter_color, + horizontal_alignment=horiz_align, + font_size=font_size, + ) + + # draw keypoints + if keypoints is not None: + for keypoints_per_instance in keypoints: + self.draw_and_connect_keypoints(keypoints_per_instance) + + return self.output + + def overlay_rotated_instances(self, boxes=None, labels=None, assigned_colors=None): + """ + Args: + boxes (ndarray): an Nx5 numpy array of + (x_center, y_center, width, height, angle_degrees) format + for the N objects in a single image. + labels (list[str]): the text to be displayed for each instance. + assigned_colors (list[matplotlib.colors]): a list of colors, where each color + corresponds to each mask or box in the image. Refer to 'matplotlib.colors' + for full list of formats that the colors are accepted in. + + Returns: + output (VisImage): image object with visualizations. + """ + + num_instances = len(boxes) + + if assigned_colors is None: + assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)] + if num_instances == 0: + return self.output + + # Display in largest to smallest order to reduce occlusion. + if boxes is not None: + areas = boxes[:, 2] * boxes[:, 3] + + sorted_idxs = np.argsort(-areas).tolist() + # Re-order overlapped instances in descending order. + boxes = boxes[sorted_idxs] + labels = [labels[k] for k in sorted_idxs] if labels is not None else None + colors = [assigned_colors[idx] for idx in sorted_idxs] + + for i in range(num_instances): + self.draw_rotated_box_with_label( + boxes[i], edge_color=colors[i], label=labels[i] if labels is not None else None + ) + + return self.output + + def draw_and_connect_keypoints(self, keypoints): + """ + Draws keypoints of an instance and follows the rules for keypoint connections + to draw lines between appropriate keypoints. This follows color heuristics for + line color. + + Args: + keypoints (Tensor): a tensor of shape (K, 3), where K is the number of keypoints + and the last dimension corresponds to (x, y, probability). + + Returns: + output (VisImage): image object with visualizations. + """ + visible = {} + keypoint_names = self.metadata.get("keypoint_names") + for idx, keypoint in enumerate(keypoints): + # draw keypoint + x, y, prob = keypoint + if prob > _KEYPOINT_THRESHOLD: + self.draw_circle((x, y), color=_RED) + if keypoint_names: + keypoint_name = keypoint_names[idx] + visible[keypoint_name] = (x, y) + + if self.metadata.get("keypoint_connection_rules"): + for kp0, kp1, color in self.metadata.keypoint_connection_rules: + if kp0 in visible and kp1 in visible: + x0, y0 = visible[kp0] + x1, y1 = visible[kp1] + color = tuple(x / 255.0 for x in color) + self.draw_line([x0, x1], [y0, y1], color=color) + + # draw lines from nose to mid-shoulder and mid-shoulder to mid-hip + # Note that this strategy is specific to person keypoints. 
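+        # (mid-shoulder and mid-hip below are simply the averages of the left/right
+        # shoulder and left/right hip coordinates, so the two extra lines roughly trace
+        # the torso midline.)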
+ # For other keypoints, it should just do nothing + try: + ls_x, ls_y = visible["left_shoulder"] + rs_x, rs_y = visible["right_shoulder"] + mid_shoulder_x, mid_shoulder_y = (ls_x + rs_x) / 2, (ls_y + rs_y) / 2 + except KeyError: + pass + else: + # draw line from nose to mid-shoulder + nose_x, nose_y = visible.get("nose", (None, None)) + if nose_x is not None: + self.draw_line([nose_x, mid_shoulder_x], [nose_y, mid_shoulder_y], color=_RED) + + try: + # draw line from mid-shoulder to mid-hip + lh_x, lh_y = visible["left_hip"] + rh_x, rh_y = visible["right_hip"] + except KeyError: + pass + else: + mid_hip_x, mid_hip_y = (lh_x + rh_x) / 2, (lh_y + rh_y) / 2 + self.draw_line([mid_hip_x, mid_shoulder_x], [mid_hip_y, mid_shoulder_y], color=_RED) + return self.output + + """ + Primitive drawing functions: + """ + + def draw_text( + self, + text, + position, + *, + font_size=None, + color="g", + horizontal_alignment="center", + rotation=0 + ): + """ + Args: + text (str): class label + position (tuple): a tuple of the x and y coordinates to place text on image. + font_size (int, optional): font of the text. If not provided, a font size + proportional to the image width is calculated and used. + color: color of the text. Refer to `matplotlib.colors` for full list + of formats that are accepted. + horizontal_alignment (str): see `matplotlib.text.Text` + rotation: rotation angle in degrees CCW + + Returns: + output (VisImage): image object with text drawn. + """ + if not font_size: + font_size = self._default_font_size + + # since the text background is dark, we don't want the text to be dark + color = np.maximum(list(mplc.to_rgb(color)), 0.2) + color[np.argmax(color)] = max(0.8, np.max(color)) + + x, y = position + self.output.ax.text( + x, + y, + text, + size=font_size * self.output.scale, + family="sans-serif", + bbox={"facecolor": "black", "alpha": 0.8, "pad": 0.7, "edgecolor": "none"}, + verticalalignment="top", + horizontalalignment=horizontal_alignment, + color=color, + zorder=10, + rotation=rotation, + ) + return self.output + + def draw_box(self, box_coord, alpha=0.5, edge_color="g", line_style="-"): + """ + Args: + box_coord (tuple): a tuple containing x0, y0, x1, y1 coordinates, where x0 and y0 + are the coordinates of the image's top left corner. x1 and y1 are the + coordinates of the image's bottom right corner. + alpha (float): blending efficient. Smaller values lead to more transparent masks. + edge_color: color of the outline of the box. Refer to `matplotlib.colors` + for full list of formats that are accepted. + line_style (string): the string to use to create the outline of the boxes. + + Returns: + output (VisImage): image object with box drawn. + """ + x0, y0, x1, y1 = box_coord + width = x1 - x0 + height = y1 - y0 + + linewidth = max(self._default_font_size / 4, 1) + + self.output.ax.add_patch( + mpl.patches.Rectangle( + (x0, y0), + width, + height, + fill=False, + edgecolor=edge_color, + linewidth=linewidth * self.output.scale, + alpha=alpha, + linestyle=line_style, + ) + ) + return self.output + + def draw_rotated_box_with_label( + self, rotated_box, alpha=0.5, edge_color="g", line_style="-", label=None + ): + """ + Args: + rotated_box (tuple): a tuple containing (cnt_x, cnt_y, w, h, angle), + where cnt_x and cnt_y are the center coordinates of the box. + w and h are the width and height of the box. angle represents how + many degrees the box is rotated CCW with regard to the 0-degree box. + alpha (float): blending efficient. Smaller values lead to more transparent masks. 
+ edge_color: color of the outline of the box. Refer to `matplotlib.colors` + for full list of formats that are accepted. + line_style (string): the string to use to create the outline of the boxes. + label (string): label for rotated box. It will not be rendered when set to None. + + Returns: + output (VisImage): image object with box drawn. + """ + cnt_x, cnt_y, w, h, angle = rotated_box + area = w * h + # use thinner lines when the box is small + linewidth = self._default_font_size / ( + 6 if area < _SMALL_OBJECT_AREA_THRESH * self.output.scale else 3 + ) + + theta = angle * math.pi / 180.0 + c = math.cos(theta) + s = math.sin(theta) + rect = [(-w / 2, h / 2), (-w / 2, -h / 2), (w / 2, -h / 2), (w / 2, h / 2)] + # x: left->right ; y: top->down + rotated_rect = [(s * yy + c * xx + cnt_x, c * yy - s * xx + cnt_y) for (xx, yy) in rect] + for k in range(4): + j = (k + 1) % 4 + self.draw_line( + [rotated_rect[k][0], rotated_rect[j][0]], + [rotated_rect[k][1], rotated_rect[j][1]], + color=edge_color, + linestyle="--" if k == 1 else line_style, + linewidth=linewidth, + ) + + if label is not None: + text_pos = rotated_rect[1] # topleft corner + + height_ratio = h / np.sqrt(self.output.height * self.output.width) + label_color = self._change_color_brightness(edge_color, brightness_factor=0.7) + font_size = ( + np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2) * 0.5 * self._default_font_size + ) + self.draw_text(label, text_pos, color=label_color, font_size=font_size, rotation=angle) + + return self.output + + def draw_circle(self, circle_coord, color, radius=3): + """ + Args: + circle_coord (list(int) or tuple(int)): contains the x and y coordinates + of the center of the circle. + color: color of the polygon. Refer to `matplotlib.colors` for a full list of + formats that are accepted. + radius (int): radius of the circle. + + Returns: + output (VisImage): image object with box drawn. + """ + x, y = circle_coord + self.output.ax.add_patch( + mpl.patches.Circle(circle_coord, radius=radius, fill=True, color=color) + ) + return self.output + + def draw_line(self, x_data, y_data, color, linestyle="-", linewidth=None): + """ + Args: + x_data (list[int]): a list containing x values of all the points being drawn. + Length of list should match the length of y_data. + y_data (list[int]): a list containing y values of all the points being drawn. + Length of list should match the length of x_data. + color: color of the line. Refer to `matplotlib.colors` for a full list of + formats that are accepted. + linestyle: style of the line. Refer to `matplotlib.lines.Line2D` + for a full list of formats that are accepted. + linewidth (float or None): width of the line. When it's None, + a default value will be computed and used. + + Returns: + output (VisImage): image object with line drawn. + """ + if linewidth is None: + linewidth = self._default_font_size / 3 + linewidth = max(linewidth, 1) + self.output.ax.add_line( + mpl.lines.Line2D( + x_data, + y_data, + linewidth=linewidth * self.output.scale, + color=color, + linestyle=linestyle, + ) + ) + return self.output + + def draw_binary_mask( + self, binary_mask, color=None, *, edge_color=None, text=None, alpha=0.5, area_threshold=4096 + ): + """ + Args: + binary_mask (ndarray): numpy array of shape (H, W), where H is the image height and + W is the image width. Each value in the array is either a 0 or 1 value of uint8 + type. + color: color of the mask. Refer to `matplotlib.colors` for a full list of + formats that are accepted. If None, will pick a random color. 
+ edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a + full list of formats that are accepted. + text (str): if None, will be drawn in the object's center of mass. + alpha (float): blending efficient. Smaller values lead to more transparent masks. + area_threshold (float): a connected component small than this will not be shown. + + Returns: + output (VisImage): image object with mask drawn. + """ + if color is None: + color = random_color(rgb=True, maximum=1) + if area_threshold is None: + area_threshold = 4096 + + has_valid_segment = False + binary_mask = binary_mask.astype("uint8") # opencv needs uint8 + mask = GenericMask(binary_mask, self.output.height, self.output.width) + shape2d = (binary_mask.shape[0], binary_mask.shape[1]) + + if not mask.has_holes: + # draw polygons for regular masks + for segment in mask.polygons: + area = mask_util.area(mask_util.frPyObjects([segment], shape2d[0], shape2d[1])) + if area < area_threshold: + continue + has_valid_segment = True + segment = segment.reshape(-1, 2) + self.draw_polygon(segment, color=color, edge_color=edge_color, alpha=alpha) + else: + rgba = np.zeros(shape2d + (4,), dtype="float32") + rgba[:, :, :3] = color + rgba[:, :, 3] = (mask.mask == 1).astype("float32") * alpha + has_valid_segment = True + self.output.ax.imshow(rgba) + + if text is not None and has_valid_segment: + # TODO sometimes drawn on wrong objects. the heuristics here can improve. + lighter_color = self._change_color_brightness(color, brightness_factor=0.7) + _num_cc, cc_labels, stats, centroids = cv2.connectedComponentsWithStats(binary_mask, 8) + largest_component_id = np.argmax(stats[1:, -1]) + 1 + + # draw text on the largest component, as well as other very large components. + for cid in range(1, _num_cc): + if cid == largest_component_id or stats[cid, -1] > _LARGE_MASK_AREA_THRESH: + # median is more stable than centroid + # center = centroids[largest_component_id] + center = np.median((cc_labels == cid).nonzero(), axis=1)[::-1] + self.draw_text(text, center, color=lighter_color) + return self.output + + def draw_polygon(self, segment, color, edge_color=None, alpha=0.5): + """ + Args: + segment: numpy array of shape Nx2, containing all the points in the polygon. + color: color of the polygon. Refer to `matplotlib.colors` for a full list of + formats that are accepted. + edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a + full list of formats that are accepted. If not provided, a darker shade + of the polygon color will be used instead. + alpha (float): blending efficient. Smaller values lead to more transparent masks. + + Returns: + output (VisImage): image object with polygon drawn. + """ + if edge_color is None: + # make edge color darker than the polygon color + if alpha > 0.8: + edge_color = self._change_color_brightness(color, brightness_factor=-0.7) + else: + edge_color = color + edge_color = mplc.to_rgb(edge_color) + (1,) + + polygon = mpl.patches.Polygon( + segment, + fill=True, + facecolor=mplc.to_rgb(color) + (alpha,), + edgecolor=edge_color, + linewidth=max(self._default_font_size // 15 * self.output.scale, 1), + ) + self.output.ax.add_patch(polygon) + return self.output + + """ + Internal methods: + """ + + def _jitter(self, color): + """ + Randomly modifies given color to produce a slightly different color than the color given. + + Args: + color (tuple[double]): a tuple of 3 elements, containing the RGB values of the color + picked. The values in the list are in the [0.0, 1.0] range. 
+ + Returns: + jittered_color (tuple[double]): a tuple of 3 elements, containing the RGB values of the + color after being jittered. The values in the list are in the [0.0, 1.0] range. + """ + color = mplc.to_rgb(color) + vec = np.random.rand(3) + # better to do it in another color space + vec = vec / np.linalg.norm(vec) * 0.5 + res = np.clip(vec + color, 0, 1) + return tuple(res) + + def _create_grayscale_image(self, mask=None): + """ + Create a grayscale version of the original image. + The colors in masked area, if given, will be kept. + """ + img_bw = self.img.astype("f4").mean(axis=2) + img_bw = np.stack([img_bw] * 3, axis=2) + if mask is not None: + img_bw[mask] = self.img[mask] + return img_bw + + def _change_color_brightness(self, color, brightness_factor): + """ + Depending on the brightness_factor, gives a lighter or darker color i.e. a color with + less or more saturation than the original color. + + Args: + color: color of the polygon. Refer to `matplotlib.colors` for a full list of + formats that are accepted. + brightness_factor (float): a value in [-1.0, 1.0] range. A lightness factor of + 0 will correspond to no change, a factor in [-1.0, 0) range will result in + a darker color and a factor in (0, 1.0] range will result in a lighter color. + + Returns: + modified_color (tuple[double]): a tuple containing the RGB values of the + modified color. Each value in the tuple is in the [0.0, 1.0] range. + """ + assert brightness_factor >= -1.0 and brightness_factor <= 1.0 + color = mplc.to_rgb(color) + polygon_color = colorsys.rgb_to_hls(*mplc.to_rgb(color)) + modified_lightness = polygon_color[1] + (brightness_factor * polygon_color[1]) + modified_lightness = 0.0 if modified_lightness < 0.0 else modified_lightness + modified_lightness = 1.0 if modified_lightness > 1.0 else modified_lightness + modified_color = colorsys.hls_to_rgb(polygon_color[0], modified_lightness, polygon_color[2]) + return modified_color + + def _convert_boxes(self, boxes): + """ + Convert different format of boxes to an NxB array, where B = 4 or 5 is the box dimension. + """ + if isinstance(boxes, Boxes) or isinstance(boxes, RotatedBoxes): + return boxes.tensor.numpy() + else: + return np.asarray(boxes) + + def _convert_masks(self, masks_or_polygons): + """ + Convert different format of masks or polygons to a tuple of masks and polygons. + + Returns: + list[GenericMask]: + """ + + m = masks_or_polygons + if isinstance(m, PolygonMasks): + m = m.polygons + if isinstance(m, BitMasks): + m = m.tensor.numpy() + if isinstance(m, torch.Tensor): + m = m.numpy() + ret = [] + for x in m: + if isinstance(x, GenericMask): + ret.append(x) + else: + ret.append(GenericMask(x, self.output.height, self.output.width)) + return ret + + def _convert_keypoints(self, keypoints): + if isinstance(keypoints, Keypoints): + keypoints = keypoints.tensor + keypoints = np.asarray(keypoints) + return keypoints + + def get_output(self): + """ + Returns: + output (VisImage): the image output containing the visualizations added + to the image. 
+ """ + return self.output diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/README.md new file mode 100644 index 0000000000000000000000000000000000000000..cc0d3297b2d436f279c3546c16c86f296402f6c5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/README.md @@ -0,0 +1,7 @@ + +## Some scripts for developers to use, include: + +- `linter.sh`: lint the codebase before commit +- `run_{inference,instant}_tests.sh`: run inference/training for a few iterations. + Note that these tests require 2 GPUs. +- `parse_results.sh`: parse results from a log file. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/linter.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/linter.sh new file mode 100755 index 0000000000000000000000000000000000000000..fd7081dbc27b85e5323d25085fb79c7ee3b54e4a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/linter.sh @@ -0,0 +1,46 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +# Run this script at project root by "./dev/linter.sh" before you commit + +vergte() { + [ "$2" = "$(echo -e "$1\\n$2" | sort -V | head -n1)" ] +} + +{ + black --version | grep -E "(19.3b0.*6733274)|(19.3b0\\+8)" > /dev/null +} || { + echo "Linter requires 'black @ git+https://github.com/psf/black@673327449f86fce558adde153bb6cbe54bfebad2' !" + exit 1 +} + +ISORT_TARGET_VERSION="4.3.21" +ISORT_VERSION=$(isort -v | grep VERSION | awk '{print $2}') +vergte "$ISORT_VERSION" "$ISORT_TARGET_VERSION" || { + echo "Linter requires isort>=${ISORT_TARGET_VERSION} !" + exit 1 +} + +set -v + +echo "Running isort ..." +isort -y -sp . --atomic + +echo "Running black ..." +black -l 100 . + +echo "Running flake8 ..." +if [ -x "$(command -v flake8-3)" ]; then + flake8-3 . +else + python3 -m flake8 . +fi + +# echo "Running mypy ..." +# Pytorch does not have enough type annotations +# mypy detectron2/solver detectron2/structures detectron2/config + +echo "Running clang-format ..." +find . 
-regex ".*\.\(cpp\|c\|cc\|cu\|cxx\|h\|hh\|hpp\|hxx\|tcc\|mm\|m\)" -print0 | xargs -0 clang-format -i + +command -v arc > /dev/null && arc lint diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/README.md new file mode 100644 index 0000000000000000000000000000000000000000..095684fcc1c5593805158c81aa0168263eb57ced --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/README.md @@ -0,0 +1,17 @@ + +## To build a cu101 wheel for release: + +``` +$ nvidia-docker run -it --storage-opt "size=20GB" --name pt pytorch/manylinux-cuda101 +# inside the container: +# git clone https://github.com/facebookresearch/detectron2/ +# cd detectron2 +# export CU_VERSION=cu101 D2_VERSION_SUFFIX= PYTHON_VERSION=3.7 PYTORCH_VERSION=1.4 +# ./dev/packaging/build_wheel.sh +``` + +## To build all wheels for `CUDA {9.2,10.0,10.1}` x `Python {3.6,3.7,3.8}`: +``` +./dev/packaging/build_all_wheels.sh +./dev/packaging/gen_wheel_index.sh /path/to/wheels +``` diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/build_all_wheels.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/build_all_wheels.sh new file mode 100755 index 0000000000000000000000000000000000000000..eb64dea70cda26f5d101c414af43645ef7e3a349 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/build_all_wheels.sh @@ -0,0 +1,57 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +PYTORCH_VERSION=1.5 + +build_for_one_cuda() { + cu=$1 + + case "$cu" in + cu*) + container_name=manylinux-cuda${cu/cu/} + ;; + cpu) + container_name=manylinux-cuda101 + ;; + *) + echo "Unrecognized cu=$cu" + exit 1 + ;; + esac + + echo "Launching container $container_name ..." + + for py in 3.6 3.7 3.8; do + docker run -itd \ + --name $container_name \ + --mount type=bind,source="$(pwd)",target=/detectron2 \ + pytorch/$container_name + + cat </dev/null 2>&1 && pwd )" +. "$script_dir/pkg_helpers.bash" + +echo "Build Settings:" +echo "CU_VERSION: $CU_VERSION" # e.g. cu101 +echo "D2_VERSION_SUFFIX: $D2_VERSION_SUFFIX" # e.g. +cu101 or "" +echo "PYTHON_VERSION: $PYTHON_VERSION" # e.g. 3.6 +echo "PYTORCH_VERSION: $PYTORCH_VERSION" # e.g. 1.4 + +setup_cuda +setup_wheel_python +yum install ninja-build -y && ln -sv /usr/bin/ninja-build /usr/bin/ninja + +export TORCH_VERSION_SUFFIX="+$CU_VERSION" +if [[ "$CU_VERSION" == "cu102" ]]; then + export TORCH_VERSION_SUFFIX="" +fi +pip_install pip numpy -U +pip_install "torch==$PYTORCH_VERSION$TORCH_VERSION_SUFFIX" \ + -f https://download.pytorch.org/whl/$CU_VERSION/torch_stable.html + +# use separate directories to allow parallel build +BASE_BUILD_DIR=build/$CU_VERSION/$PYTHON_VERSION +python setup.py \ + build -b $BASE_BUILD_DIR \ + bdist_wheel -b $BASE_BUILD_DIR/build_dist -d wheels/$CU_VERSION diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/gen_wheel_index.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/gen_wheel_index.sh new file mode 100755 index 0000000000000000000000000000000000000000..44d6041cdf45afdd39a85d413f08373e8516999b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/gen_wheel_index.sh @@ -0,0 +1,27 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + + +root=$1 +if [[ -z "$root" ]]; then + echo "Usage: ./gen_wheel_index.sh /path/to/wheels" + exit +fi + +index=$root/index.html + +cd "$root" +for cu in cpu cu92 cu100 cu101 cu102; do + cd $cu + echo "Creating $PWD/index.html ..." + for whl in *.whl; do + echo "$whl
" + done > index.html + cd "$root" +done + +echo "Creating $index ..." +for whl in $(find . -type f -name '*.whl' -printf '%P\n' | sort); do + echo "$whl
" +done > "$index" + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/pkg_helpers.bash b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/pkg_helpers.bash new file mode 100755 index 0000000000000000000000000000000000000000..51e6185c7fba6ba0f7a325c467993196f1c9b4ef --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/packaging/pkg_helpers.bash @@ -0,0 +1,57 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +# Function to retry functions that sometimes timeout or have flaky failures +retry () { + $* || (sleep 1 && $*) || (sleep 2 && $*) || (sleep 4 && $*) || (sleep 8 && $*) +} +# Install with pip a bit more robustly than the default +pip_install() { + retry pip install --progress-bar off "$@" +} + + +setup_cuda() { + # Now work out the CUDA settings + # Like other torch domain libraries, we choose common GPU architectures only. + export FORCE_CUDA=1 + case "$CU_VERSION" in + cu102) + export CUDA_HOME=/usr/local/cuda-10.2/ + export TORCH_CUDA_ARCH_LIST="3.5;3.7;5.0;5.2;6.0+PTX;6.1+PTX;7.0+PTX;7.5+PTX" + ;; + cu101) + export CUDA_HOME=/usr/local/cuda-10.1/ + export TORCH_CUDA_ARCH_LIST="3.5;3.7;5.0;5.2;6.0+PTX;6.1+PTX;7.0+PTX;7.5+PTX" + ;; + cu100) + export CUDA_HOME=/usr/local/cuda-10.0/ + export TORCH_CUDA_ARCH_LIST="3.5;3.7;5.0;5.2;6.0+PTX;6.1+PTX;7.0+PTX;7.5+PTX" + ;; + cu92) + export CUDA_HOME=/usr/local/cuda-9.2/ + export TORCH_CUDA_ARCH_LIST="3.5;3.7;5.0;5.2;6.0+PTX;6.1+PTX;7.0+PTX" + ;; + cpu) + unset FORCE_CUDA + export CUDA_VISIBLE_DEVICES= + ;; + *) + echo "Unrecognized CU_VERSION=$CU_VERSION" + exit 1 + ;; + esac +} + +setup_wheel_python() { + case "$PYTHON_VERSION" in + 3.6) python_abi=cp36-cp36m ;; + 3.7) python_abi=cp37-cp37m ;; + 3.8) python_abi=cp38-cp38 ;; + *) + echo "Unrecognized PYTHON_VERSION=$PYTHON_VERSION" + exit 1 + ;; + esac + export PATH="/opt/python/$python_abi/bin:$PATH" +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/parse_results.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/parse_results.sh new file mode 100755 index 0000000000000000000000000000000000000000..874b688889049e869854273c83182e5b019315b3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/parse_results.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +# A shell script that parses metrics from the log file. +# Make it easier for developers to track performance of models. + +LOG="$1" + +if [[ -z "$LOG" ]]; then + echo "Usage: $0 /path/to/log/file" + exit 1 +fi + +# [12/15 11:47:32] trainer INFO: Total training time: 12:15:04.446477 (0.4900 s / it) +# [12/15 11:49:03] inference INFO: Total inference time: 0:01:25.326167 (0.13652186737060548 s / demo per device, on 8 devices) +# [12/15 11:49:03] inference INFO: Total inference pure compute time: ..... 
+ +# training time +trainspeed=$(grep -o 'Overall training.*' "$LOG" | grep -Eo '\(.*\)' | grep -o '[0-9\.]*') +echo "Training speed: $trainspeed s/it" + +# inference time: there could be multiple inference during training +inferencespeed=$(grep -o 'Total inference pure.*' "$LOG" | tail -n1 | grep -Eo '\(.*\)' | grep -o '[0-9\.]*' | head -n1) +echo "Inference speed: $inferencespeed s/it" + +# [12/15 11:47:18] trainer INFO: eta: 0:00:00 iter: 90000 loss: 0.5407 (0.7256) loss_classifier: 0.1744 (0.2446) loss_box_reg: 0.0838 (0.1160) loss_mask: 0.2159 (0.2722) loss_objectness: 0.0244 (0.0429) loss_rpn_box_reg: 0.0279 (0.0500) time: 0.4487 (0.4899) data: 0.0076 (0.0975) lr: 0.000200 max mem: 4161 +memory=$(grep -o 'max[_ ]mem: [0-9]*' "$LOG" | tail -n1 | grep -o '[0-9]*') +echo "Training memory: $memory MB" + +echo "Easy to copypaste:" +echo "$trainspeed","$inferencespeed","$memory" + +echo "------------------------------" + +# [12/26 17:26:32] engine.coco_evaluation: copypaste: Task: bbox +# [12/26 17:26:32] engine.coco_evaluation: copypaste: AP,AP50,AP75,APs,APm,APl +# [12/26 17:26:32] engine.coco_evaluation: copypaste: 0.0017,0.0024,0.0017,0.0005,0.0019,0.0011 +# [12/26 17:26:32] engine.coco_evaluation: copypaste: Task: segm +# [12/26 17:26:32] engine.coco_evaluation: copypaste: AP,AP50,AP75,APs,APm,APl +# [12/26 17:26:32] engine.coco_evaluation: copypaste: 0.0014,0.0021,0.0016,0.0005,0.0016,0.0011 + +echo "COCO Results:" +num_tasks=$(grep -o 'copypaste:.*Task.*' "$LOG" | sort -u | wc -l) +# each task has 3 lines +grep -o 'copypaste:.*' "$LOG" | cut -d ' ' -f 2- | tail -n $((num_tasks * 3)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/run_inference_tests.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/run_inference_tests.sh new file mode 100755 index 0000000000000000000000000000000000000000..17e422d576e5fe9efcd85790954c569c962657d6 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/run_inference_tests.sh @@ -0,0 +1,44 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +BIN="python tools/train_net.py" +OUTPUT="inference_test_output" +NUM_GPUS=2 + +CFG_LIST=( "${@:1}" ) + +if [ ${#CFG_LIST[@]} -eq 0 ]; then + CFG_LIST=( ./configs/quick_schedules/*inference_acc_test.yaml ) +fi + +echo "========================================================================" +echo "Configs to run:" +echo "${CFG_LIST[@]}" +echo "========================================================================" + + +for cfg in "${CFG_LIST[@]}"; do + echo "========================================================================" + echo "Running $cfg ..." + echo "========================================================================" + $BIN \ + --eval-only \ + --num-gpus $NUM_GPUS \ + --config-file "$cfg" \ + OUTPUT_DIR $OUTPUT + rm -rf $OUTPUT +done + + +echo "========================================================================" +echo "Running demo.py ..." 
+echo "========================================================================" +DEMO_BIN="python demo/demo.py" +COCO_DIR=datasets/coco/val2014 +mkdir -pv $OUTPUT + +set -v + +$DEMO_BIN --config-file ./configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml \ + --input $COCO_DIR/COCO_val2014_0000001933* --output $OUTPUT +rm -rf $OUTPUT diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/run_instant_tests.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/run_instant_tests.sh new file mode 100755 index 0000000000000000000000000000000000000000..2c51de649262e7371fb173210c8edc377e8177e0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/dev/run_instant_tests.sh @@ -0,0 +1,27 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +BIN="python tools/train_net.py" +OUTPUT="instant_test_output" +NUM_GPUS=2 + +CFG_LIST=( "${@:1}" ) +if [ ${#CFG_LIST[@]} -eq 0 ]; then + CFG_LIST=( ./configs/quick_schedules/*instant_test.yaml ) +fi + +echo "========================================================================" +echo "Configs to run:" +echo "${CFG_LIST[@]}" +echo "========================================================================" + +for cfg in "${CFG_LIST[@]}"; do + echo "========================================================================" + echo "Running $cfg ..." + echo "========================================================================" + $BIN --num-gpus $NUM_GPUS --config-file "$cfg" \ + SOLVER.IMS_PER_BATCH $(($NUM_GPUS * 2)) \ + OUTPUT_DIR "$OUTPUT" + rm -rf "$OUTPUT" +done + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/Dockerfile b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..2a8603903e36eafb3a61fac0a086a919cc67fe38 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/Dockerfile @@ -0,0 +1,49 @@ +FROM nvidia/cuda:10.1-cudnn7-devel + +ENV DEBIAN_FRONTEND noninteractive +RUN apt-get update && apt-get install -y \ + python3-opencv ca-certificates python3-dev git wget sudo \ + cmake ninja-build protobuf-compiler libprotobuf-dev && \ + rm -rf /var/lib/apt/lists/* +RUN ln -sv /usr/bin/python3 /usr/bin/python + +# create a non-root user +ARG USER_ID=1000 +RUN useradd -m --no-log-init --system --uid ${USER_ID} appuser -g sudo +RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers +USER appuser +WORKDIR /home/appuser + +ENV PATH="/home/appuser/.local/bin:${PATH}" +RUN wget https://bootstrap.pypa.io/get-pip.py && \ + python3 get-pip.py --user && \ + rm get-pip.py + +# install dependencies +# See https://pytorch.org/ for other options if you use a different version of CUDA +RUN pip install --user tensorboard cython +RUN pip install --user torch==1.5+cu101 torchvision==0.6+cu101 -f https://download.pytorch.org/whl/torch_stable.html +RUN pip install --user 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI' + +RUN pip install --user 'git+https://github.com/facebookresearch/fvcore' +# install detectron2 +RUN git clone https://github.com/facebookresearch/detectron2 detectron2_repo +# set FORCE_CUDA because during `docker build` cuda is not accessible +ENV FORCE_CUDA="1" +# This will by default build detectron2 for all common cuda architectures and take a lot more time, +# because inside `docker build`, there is no way to tell which architecture will 
be used. +ARG TORCH_CUDA_ARCH_LIST="Kepler;Kepler+Tesla;Maxwell;Maxwell+Tegra;Pascal;Volta;Turing" +ENV TORCH_CUDA_ARCH_LIST="${TORCH_CUDA_ARCH_LIST}" + +RUN pip install --user -e detectron2_repo + +# Set a fixed model cache directory. +ENV FVCORE_CACHE="/tmp" +WORKDIR /home/appuser/detectron2_repo + +# run detectron2 under user "appuser": +# wget http://images.cocodataset.org/val2017/000000439715.jpg -O input.jpg +# python3 demo/demo.py \ + #--config-file configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml \ + #--input input.jpg --output outputs/ \ + #--opts MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/Dockerfile-circleci b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/Dockerfile-circleci new file mode 100644 index 0000000000000000000000000000000000000000..bc0be845adc247eb458d212ae5352c594cd80a72 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/Dockerfile-circleci @@ -0,0 +1,17 @@ +FROM nvidia/cuda:10.1-cudnn7-devel +# This dockerfile only aims to provide an environment for unittest on CircleCI + +ENV DEBIAN_FRONTEND noninteractive +RUN apt-get update && apt-get install -y \ + python3-opencv ca-certificates python3-dev git wget sudo ninja-build && \ + rm -rf /var/lib/apt/lists/* + +RUN wget -q https://bootstrap.pypa.io/get-pip.py && \ + python3 get-pip.py && \ + rm get-pip.py + +# install dependencies +# See https://pytorch.org/ for other options if you use a different version of CUDA +RUN pip install tensorboard cython +RUN pip install torch==1.5+cu101 torchvision==0.6+cu101 -f https://download.pytorch.org/whl/torch_stable.html +RUN pip install 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI' diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/README.md new file mode 100644 index 0000000000000000000000000000000000000000..760c4054d0e4fa56a67ab4b59c14979498e2f94a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/README.md @@ -0,0 +1,36 @@ + +## Use the container (with docker ≥ 19.03) + +``` +cd docker/ +# Build: +docker build --build-arg USER_ID=$UID -t detectron2:v0 . +# Run: +docker run --gpus all -it \ + --shm-size=8gb --env="DISPLAY" --volume="/tmp/.X11-unix:/tmp/.X11-unix:rw" \ + --name=detectron2 detectron2:v0 + +# Grant docker access to host X server to show images +xhost +local:`docker inspect --format='{{ .Config.Hostname }}' detectron2` +``` + +## Use the container (with docker < 19.03) + +Install docker-compose and nvidia-docker2, then run: +``` +cd docker && USER_ID=$UID docker-compose run detectron2 +``` + +#### Using a persistent cache directory + +You can prevent models from being re-downloaded on every run, +by storing them in a cache directory. + +To do this, add `--volume=$HOME/.torch/fvcore_cache:/tmp:rw` in the run command. + +## Install new dependencies +Add the following to `Dockerfile` to make persistent changes. +``` +RUN sudo apt-get update && sudo apt-get install -y vim +``` +Or run them in the container to make temporary changes. 
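+
+For example, assuming the container was started with `--name=detectron2` as in the run
+command above, a temporary change can be made from a second shell:
+```
+docker exec -it detectron2 /bin/bash
+sudo apt-get update && sudo apt-get install -y vim
+```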
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/docker-compose.yml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..e660f44645a5cc164cd5a59f2cdcf7e1ded60c2e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docker/docker-compose.yml @@ -0,0 +1,18 @@ +version: "2.3" +services: + detectron2: + build: + context: . + dockerfile: Dockerfile + args: + USER_ID: ${USER_ID:-1000} + runtime: nvidia # TODO: Exchange with "gpu: all" in the future (see https://github.com/facebookresearch/detectron2/pull/197/commits/00545e1f376918db4a8ce264d427a07c1e896c5a). + shm_size: "8gb" + ulimits: + memlock: -1 + stack: 67108864 + volumes: + - /tmp/.X11-unix:/tmp/.X11-unix:ro + environment: + - DISPLAY=$DISPLAY + - NVIDIA_VISIBLE_DEVICES=all diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/.gitignore b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..e35d8850c9688b1ce82711694692cc574a799396 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/.gitignore @@ -0,0 +1 @@ +_build diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/Makefile b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..d537643dd411736a5f309383cfef52ea7d5e4599 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/Makefile @@ -0,0 +1,19 @@ +# Minimal makefile for Sphinx documentation +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2c65c3676b488f3654b7e3231e1cfd06df48d4be --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/README.md @@ -0,0 +1,16 @@ +# Read the docs: + +The latest documentation built from this directory is available at [detectron2.readthedocs.io](https://detectron2.readthedocs.io/). +Documents in this directory are not meant to be read on github. + +# Build the docs: + +1. Install detectron2 according to [INSTALL.md](INSTALL.md). +2. Install additional libraries required to build docs: + - docutils==0.16 + - Sphinx==3.0.0 + - recommonmark==0.6.0 + - sphinx_rtd_theme + - mock + +3. Run `make html` from this directory. 
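+
+For example, steps 2-3 can be run as follows (package versions as pinned above):
+```
+pip install docutils==0.16 Sphinx==3.0.0 recommonmark==0.6.0 sphinx_rtd_theme mock
+make html
+```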
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/conf.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..44e9f2b4db549a3a5ef1420b27d408915e86657c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/conf.py @@ -0,0 +1,335 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +# flake8: noqa + +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +import mock +from sphinx.domains import Domain +from typing import Dict, List, Tuple + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +import sphinx_rtd_theme + + +class GithubURLDomain(Domain): + """ + Resolve certain links in markdown files to github source. + """ + + name = "githuburl" + ROOT = "https://github.com/facebookresearch/detectron2/blob/master/" + LINKED_DOC = ["tutorials/install", "tutorials/getting_started"] + + def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode): + github_url = None + if not target.endswith("html") and target.startswith("../../"): + url = target.replace("../", "") + github_url = url + if fromdocname in self.LINKED_DOC: + # unresolved links in these docs are all github links + github_url = target + + if github_url is not None: + if github_url.endswith("MODEL_ZOO") or github_url.endswith("README"): + # bug of recommonmark. 
+ # https://github.com/readthedocs/recommonmark/blob/ddd56e7717e9745f11300059e4268e204138a6b1/recommonmark/parser.py#L152-L155 + github_url += ".md" + print("Ref {} resolved to github:{}".format(target, github_url)) + contnode["refuri"] = self.ROOT + github_url + return [("githuburl:any", contnode)] + else: + return [] + + +# to support markdown +from recommonmark.parser import CommonMarkParser + +sys.path.insert(0, os.path.abspath("../")) +os.environ["DOC_BUILDING"] = "True" +DEPLOY = os.environ.get("READTHEDOCS") == "True" + + +# -- Project information ----------------------------------------------------- + +# fmt: off +try: + import torch # noqa +except ImportError: + for m in [ + "torch", "torchvision", "torch.nn", "torch.nn.parallel", "torch.distributed", "torch.multiprocessing", "torch.autograd", + "torch.autograd.function", "torch.nn.modules", "torch.nn.modules.utils", "torch.utils", "torch.utils.data", "torch.onnx", + "torchvision", "torchvision.ops", + ]: + sys.modules[m] = mock.Mock(name=m) + sys.modules['torch'].__version__ = "1.5" # fake version + +for m in [ + "cv2", "scipy", "portalocker", "detectron2._C", + "pycocotools", "pycocotools.mask", "pycocotools.coco", "pycocotools.cocoeval", + "google", "google.protobuf", "google.protobuf.internal", "onnx", + "caffe2", "caffe2.proto", "caffe2.python", "caffe2.python.utils", "caffe2.python.onnx", "caffe2.python.onnx.backend", +]: + sys.modules[m] = mock.Mock(name=m) +# fmt: on +sys.modules["cv2"].__version__ = "3.4" + +import detectron2 # isort: skip + + +project = "detectron2" +copyright = "2019-2020, detectron2 contributors" +author = "detectron2 contributors" + +# The short X.Y version +version = detectron2.__version__ +# The full version, including alpha/beta/rc tags +release = version + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +needs_sphinx = "3.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "recommonmark", + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.mathjax", + "sphinx.ext.viewcode", + "sphinx.ext.githubpages", +] + +# -- Configurations for plugins ------------ +napoleon_google_docstring = True +napoleon_include_init_with_doc = True +napoleon_include_special_with_doc = True +napoleon_numpy_docstring = False +napoleon_use_rtype = False +autodoc_inherit_docstrings = False +autodoc_member_order = "bysource" + +if DEPLOY: + intersphinx_timeout = 10 +else: + # skip this when building locally + intersphinx_timeout = 0.1 +intersphinx_mapping = { + "python": ("https://docs.python.org/3.6", None), + "numpy": ("https://docs.scipy.org/doc/numpy/", None), + "torch": ("https://pytorch.org/docs/master/", None), +} +# ------------------------- + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +source_suffix = [".rst", ".md"] + +# The master toctree document. +master_doc = "index" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. 
+language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "build", "README.md", "tutorials/README.md"] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + + +# -- Options for HTML output ------------------------------------------------- + +html_theme = "sphinx_rtd_theme" +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = "detectron2doc" + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, "detectron2.tex", "detectron2 Documentation", "detectron2 contributors", "manual") +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, "detectron2", "detectron2 Documentation", [author], 1)] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "detectron2", + "detectron2 Documentation", + author, + "detectron2", + "One line description of project.", + "Miscellaneous", + ) +] + + +# -- Options for todo extension ---------------------------------------------- + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +_DEPRECATED_NAMES = set() + + +def autodoc_skip_member(app, what, name, obj, skip, options): + # we hide something deliberately + if getattr(obj, "__HIDE_SPHINX_DOC__", False): + return True + # Hide some names that are deprecated or not intended to be used + if name in _DEPRECATED_NAMES: + return True + return None + + +_PAPER_DATA = { + "resnet": ("1512.03385", "Deep Residual Learning for Image Recognition"), + "fpn": ("1612.03144", "Feature Pyramid Networks for Object Detection"), + "mask r-cnn": ("1703.06870", "Mask R-CNN"), + "faster r-cnn": ( + "1506.01497", + "Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks", + ), + "deformconv": ("1703.06211", "Deformable Convolutional Networks"), + "deformconv2": ("1811.11168", "Deformable ConvNets v2: More Deformable, Better Results"), + "panopticfpn": ("1901.02446", "Panoptic Feature Pyramid Networks"), + "retinanet": ("1708.02002", "Focal Loss for Dense Object Detection"), + "cascade r-cnn": ("1712.00726", "Cascade R-CNN: Delving into High Quality Object Detection"), + "lvis": ("1908.03195", "LVIS: A Dataset for Large Vocabulary Instance Segmentation"), + "rrpn": ("1703.01086", "Arbitrary-Oriented Scene Text Detection via Rotation Proposals"), + "in1k1h": ("1706.02677", "Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour"), +} + + +def paper_ref_role( + typ: str, + rawtext: str, + text: str, + lineno: int, + inliner, + options: Dict = {}, + content: List[str] = [], +): + """ + Parse :paper:`xxx`. Similar to the "extlinks" sphinx extension. + """ + from docutils import nodes, utils + from sphinx.util.nodes import split_explicit_title + + text = utils.unescape(text) + has_explicit_title, title, link = split_explicit_title(text) + link = link.lower() + if link not in _PAPER_DATA: + inliner.reporter.warning("Cannot find paper " + link) + paper_url, paper_title = "#", link + else: + paper_url, paper_title = _PAPER_DATA[link] + if "/" not in paper_url: + paper_url = "https://arxiv.org/abs/" + paper_url + if not has_explicit_title: + title = paper_title + pnode = nodes.reference(title, title, internal=False, refuri=paper_url) + return [pnode], [] + + +def setup(app): + from recommonmark.transform import AutoStructify + + app.add_domain(GithubURLDomain) + app.connect("autodoc-skip-member", autodoc_skip_member) + app.add_role("paper", paper_ref_role) + app.add_config_value( + "recommonmark_config", + {"enable_math": True, "enable_inline_math": True, "enable_eval_rst": True}, + True, + ) + app.add_transform(AutoStructify) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/index.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..8634b7b12ab906c10a78d6053428029799282ffd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/index.rst @@ -0,0 +1,14 @@ +.. detectron2 documentation master file, created by + sphinx-quickstart on Sat Sep 21 13:46:45 2019. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to detectron2's documentation! +====================================== + +.. 
toctree:: + :maxdepth: 2 + + tutorials/index + notes/index + modules/index diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/checkpoint.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/checkpoint.rst new file mode 100644 index 0000000000000000000000000000000000000000..616cb186c40212d7a0ca311d21691245b2fce996 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/checkpoint.rst @@ -0,0 +1,7 @@ +detectron2.checkpoint package +============================= + +.. automodule:: detectron2.checkpoint + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/config.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/config.rst new file mode 100644 index 0000000000000000000000000000000000000000..034bd5f5e8a79d9eb2109f86b7aa12eea9c8b786 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/config.rst @@ -0,0 +1,17 @@ +detectron2.config package +========================= + +.. automodule:: detectron2.config + :members: + :undoc-members: + :show-inheritance: + :inherited-members: + + +Config References +----------------- + +.. literalinclude:: ../../detectron2/config/defaults.py + :language: python + :linenos: + :lines: 4- diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/data.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/data.rst new file mode 100644 index 0000000000000000000000000000000000000000..3697f0e22f3351a68ee40e4cadbd3ee6d978af8d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/data.rst @@ -0,0 +1,40 @@ +detectron2.data package +======================= + +.. automodule:: detectron2.data + :members: + :undoc-members: + :show-inheritance: + +detectron2.data.detection\_utils module +--------------------------------------- + +.. automodule:: detectron2.data.detection_utils + :members: + :undoc-members: + :show-inheritance: + +detectron2.data.datasets module +--------------------------------------- + +.. automodule:: detectron2.data.datasets + :members: + :undoc-members: + :show-inheritance: + +detectron2.data.samplers module +--------------------------------------- + +.. automodule:: detectron2.data.samplers + :members: + :undoc-members: + :show-inheritance: + + +detectron2.data.transforms module +--------------------------------------- + +.. automodule:: detectron2.data.transforms + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/engine.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/engine.rst new file mode 100644 index 0000000000000000000000000000000000000000..bb8b533aee225b1096fe4353b03533208f92732e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/engine.rst @@ -0,0 +1,25 @@ +detectron2.engine package +========================= + + +.. automodule:: detectron2.engine + :members: + :undoc-members: + :show-inheritance: + + +detectron2.engine.defaults module +--------------------------------- + +.. automodule:: detectron2.engine.defaults + :members: + :undoc-members: + :show-inheritance: + +detectron2.engine.hooks module +--------------------------------- + +.. 
automodule:: detectron2.engine.hooks + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/evaluation.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/evaluation.rst new file mode 100644 index 0000000000000000000000000000000000000000..d9d34ff1a21c42b33ce2ad8b4415052af194397f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/evaluation.rst @@ -0,0 +1,7 @@ +detectron2.evaluation package +============================= + +.. automodule:: detectron2.evaluation + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/export.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/export.rst new file mode 100644 index 0000000000000000000000000000000000000000..bb7c3c9173cae323e67cb9330b292fefc40ec760 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/export.rst @@ -0,0 +1,7 @@ +detectron2.export package +========================= + +.. automodule:: detectron2.export + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/index.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..1b246f570070b4f8ef47d00968498d49f0310a6e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/index.rst @@ -0,0 +1,17 @@ +API Documentation +================== + +.. toctree:: + + checkpoint + config + data + engine + evaluation + layers + model_zoo + modeling + solver + structures + utils + export diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/layers.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/layers.rst new file mode 100644 index 0000000000000000000000000000000000000000..6aeb5213a4b27edeb7c0b2bdb816fd1af8d22ce4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/layers.rst @@ -0,0 +1,7 @@ +detectron2.layers package +========================= + +.. automodule:: detectron2.layers + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/model_zoo.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/model_zoo.rst new file mode 100644 index 0000000000000000000000000000000000000000..8b1c7d598f509db2361928aac1be4f25854d9f93 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/model_zoo.rst @@ -0,0 +1,7 @@ +detectron2.model_zoo package +============================ + +.. automodule:: detectron2.model_zoo + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/modeling.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/modeling.rst new file mode 100644 index 0000000000000000000000000000000000000000..58ccd2c591774f3766f71da00b6938a0f4f3f592 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/modeling.rst @@ -0,0 +1,58 @@ +detectron2.modeling package +=========================== + +.. 
automodule:: detectron2.modeling + :members: + :undoc-members: + :show-inheritance: + + +detectron2.modeling.poolers module +--------------------------------------- + +.. automodule:: detectron2.modeling.poolers + :members: + :undoc-members: + :show-inheritance: + + +detectron2.modeling.sampling module +------------------------------------ + +.. automodule:: detectron2.modeling.sampling + :members: + :undoc-members: + :show-inheritance: + + +detectron2.modeling.box_regression module +------------------------------------------ + +.. automodule:: detectron2.modeling.box_regression + :members: + :undoc-members: + :show-inheritance: + + +Model Registries +----------------- + +These are different registries provided in modeling. +Each registry provide you the ability to replace it with your customized component, +without having to modify detectron2's code. + +Note that it is impossible to allow users to customize any line of code directly. +Even just to add one line at some place, +you'll likely need to find out the smallest registry which contains that line, +and register your component to that registry. + + +.. autodata:: detectron2.modeling.META_ARCH_REGISTRY +.. autodata:: detectron2.modeling.BACKBONE_REGISTRY +.. autodata:: detectron2.modeling.PROPOSAL_GENERATOR_REGISTRY +.. autodata:: detectron2.modeling.RPN_HEAD_REGISTRY +.. autodata:: detectron2.modeling.ANCHOR_GENERATOR_REGISTRY +.. autodata:: detectron2.modeling.ROI_HEADS_REGISTRY +.. autodata:: detectron2.modeling.ROI_BOX_HEAD_REGISTRY +.. autodata:: detectron2.modeling.ROI_MASK_HEAD_REGISTRY +.. autodata:: detectron2.modeling.ROI_KEYPOINT_HEAD_REGISTRY diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/solver.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/solver.rst new file mode 100644 index 0000000000000000000000000000000000000000..7f4a49f2ebaef2760b91eb7cecd32dcbff038efb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/solver.rst @@ -0,0 +1,7 @@ +detectron2.solver package +========================= + +.. automodule:: detectron2.solver + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/structures.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/structures.rst new file mode 100644 index 0000000000000000000000000000000000000000..5701c61abf5f74f61807e131f708304a8c9bab82 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/structures.rst @@ -0,0 +1,7 @@ +detectron2.structures package +============================= + +.. automodule:: detectron2.structures + :members: + :undoc-members: + :show-inheritance: diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/utils.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/utils.rst new file mode 100644 index 0000000000000000000000000000000000000000..8b57292ac0e655f40756b19c8eea259bddb62aab --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/modules/utils.rst @@ -0,0 +1,80 @@ +detectron2.utils package +======================== + +detectron2.utils.colormap module +-------------------------------- + +.. automodule:: detectron2.utils.colormap + :members: + :undoc-members: + :show-inheritance: + +detectron2.utils.comm module +---------------------------- + +.. 
automodule:: detectron2.utils.comm + :members: + :undoc-members: + :show-inheritance: + + +detectron2.utils.events module +------------------------------ + +.. automodule:: detectron2.utils.events + :members: + :undoc-members: + :show-inheritance: + + +detectron2.utils.logger module +------------------------------ + +.. automodule:: detectron2.utils.logger + :members: + :undoc-members: + :show-inheritance: + + +detectron2.utils.registry module +-------------------------------- + +.. automodule:: detectron2.utils.registry + :members: + :undoc-members: + :show-inheritance: + +detectron2.utils.memory module +---------------------------------- + +.. automodule:: detectron2.utils.memory + :members: + :undoc-members: + :show-inheritance: + + +detectron2.utils.analysis module +---------------------------------- + +.. automodule:: detectron2.utils.analysis + :members: + :undoc-members: + :show-inheritance: + + +detectron2.utils.visualizer module +---------------------------------- + +.. automodule:: detectron2.utils.visualizer + :members: + :undoc-members: + :show-inheritance: + +detectron2.utils.video\_visualizer module +----------------------------------------- + +.. automodule:: detectron2.utils.video_visualizer + :members: + :undoc-members: + :show-inheritance: + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/benchmarks.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/benchmarks.md new file mode 100644 index 0000000000000000000000000000000000000000..963f9210b39ce3ae248541644362631cb325d2b2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/benchmarks.md @@ -0,0 +1,196 @@ + +# Benchmarks + +Here we benchmark the training speed of a Mask R-CNN in detectron2, +with some other popular open source Mask R-CNN implementations. + + +### Settings + +* Hardware: 8 NVIDIA V100s with NVLink. +* Software: Python 3.7, CUDA 10.1, cuDNN 7.6.5, PyTorch 1.5, + TensorFlow 1.15.0rc2, Keras 2.2.5, MxNet 1.6.0b20190820. +* Model: an end-to-end R-50-FPN Mask-RCNN model, using the same hyperparameters as the + [Detectron baseline config](https://github.com/facebookresearch/Detectron/blob/master/configs/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml) + (it does not have scale augmentation). +* Metrics: We use the average throughput in iterations 100-500 to skip GPU warmup time. + Note that for R-CNN-style models, the throughput of a model typically changes during training, because + it depends on the predictions of the model. Therefore this metric is not directly comparable with + "train speed" in model zoo, which is the average speed of the entire training run. + + +### Main Results + +```eval_rst ++-------------------------------+--------------------+ +| Implementation | Throughput (img/s) | ++===============================+====================+ +| |D2| |PT| | 62 | ++-------------------------------+--------------------+ +| mmdetection_ |PT| | 53 | ++-------------------------------+--------------------+ +| maskrcnn-benchmark_ |PT| | 53 | ++-------------------------------+--------------------+ +| tensorpack_ |TF| | 50 | ++-------------------------------+--------------------+ +| simpledet_ |mxnet| | 39 | ++-------------------------------+--------------------+ +| Detectron_ |C2| | 19 | ++-------------------------------+--------------------+ +| `matterport/Mask_RCNN`__ |TF| | 14 | ++-------------------------------+--------------------+ + +..
_maskrcnn-benchmark: https://github.com/facebookresearch/maskrcnn-benchmark/ +.. _tensorpack: https://github.com/tensorpack/tensorpack/tree/master/examples/FasterRCNN +.. _mmdetection: https://github.com/open-mmlab/mmdetection/ +.. _simpledet: https://github.com/TuSimple/simpledet/ +.. _Detectron: https://github.com/facebookresearch/Detectron +__ https://github.com/matterport/Mask_RCNN/ + +.. |D2| image:: https://github.com/facebookresearch/detectron2/raw/master/.github/Detectron2-Logo-Horz.svg?sanitize=true + :height: 15pt + :target: https://github.com/facebookresearch/detectron2/ +.. |PT| image:: https://pytorch.org/assets/images/logo-icon.svg + :width: 15pt + :height: 15pt + :target: https://pytorch.org +.. |TF| image:: https://static.nvidiagrid.net/ngc/containers/tensorflow.png + :width: 15pt + :height: 15pt + :target: https://tensorflow.org +.. |mxnet| image:: https://github.com/dmlc/web-data/raw/master/mxnet/image/mxnet_favicon.png + :width: 15pt + :height: 15pt + :target: https://mxnet.apache.org/ +.. |C2| image:: https://caffe2.ai/static/logo.svg + :width: 15pt + :height: 15pt + :target: https://caffe2.ai +``` + + +Details for each implementation: + +* __Detectron2__: with release v0.1.2, run: + ``` + python tools/train_net.py --config-file configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml --num-gpus 8 + ``` + +* __mmdetection__: at commit `b0d845f`, run + ``` + ./tools/dist_train.sh configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py 8 + ``` + +* __maskrcnn-benchmark__: use commit `0ce8f6f` with `sed -i 's/torch.uint8/torch.bool/g' **/*.py; sed -i 's/AT_CHECK/TORCH_CHECK/g' **/*.cu` + to make it compatible with PyTorch 1.5. Then, run training with + ``` + python -m torch.distributed.launch --nproc_per_node=8 tools/train_net.py --config-file configs/e2e_mask_rcnn_R_50_FPN_1x.yaml + ``` + The speed we observed is faster than its model zoo, likely due to different software versions. + +* __tensorpack__: at commit `caafda`, `export TF_CUDNN_USE_AUTOTUNE=0`, then run + ``` + mpirun -np 8 ./train.py --config DATA.BASEDIR=/data/coco TRAINER=horovod BACKBONE.STRIDE_1X1=True TRAIN.STEPS_PER_EPOCH=50 --load ImageNet-R50-AlignPadding.npz + ``` + +* __SimpleDet__: at commit `9187a1`, run + ``` + python detection_train.py --config config/mask_r50v1_fpn_1x.py + ``` + +* __Detectron__: run + ``` + python tools/train_net.py --cfg configs/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml + ``` + Note that many of its ops run on CPUs, therefore the performance is limited. + +* __matterport/Mask_RCNN__: at commit `3deaec`, apply the following diff, `export TF_CUDNN_USE_AUTOTUNE=0`, then run + ``` + python coco.py train --dataset=/data/coco/ --model=imagenet + ``` + Note that many small details in this implementation might be different + from Detectron's standards. +
+ + (diff to make it use the same hyperparameters - click to expand) + + + ```diff + diff --git i/mrcnn/model.py w/mrcnn/model.py + index 62cb2b0..61d7779 100644 + --- i/mrcnn/model.py + +++ w/mrcnn/model.py + @@ -2367,8 +2367,8 @@ class MaskRCNN(): + epochs=epochs, + steps_per_epoch=self.config.STEPS_PER_EPOCH, + callbacks=callbacks, + - validation_data=val_generator, + - validation_steps=self.config.VALIDATION_STEPS, + + #validation_data=val_generator, + + #validation_steps=self.config.VALIDATION_STEPS, + max_queue_size=100, + workers=workers, + use_multiprocessing=True, + diff --git i/mrcnn/parallel_model.py w/mrcnn/parallel_model.py + index d2bf53b..060172a 100644 + --- i/mrcnn/parallel_model.py + +++ w/mrcnn/parallel_model.py + @@ -32,6 +32,7 @@ class ParallelModel(KM.Model): + keras_model: The Keras model to parallelize + gpu_count: Number of GPUs. Must be > 1 + """ + + super().__init__() + self.inner_model = keras_model + self.gpu_count = gpu_count + merged_outputs = self.make_parallel() + diff --git i/samples/coco/coco.py w/samples/coco/coco.py + index 5d172b5..239ed75 100644 + --- i/samples/coco/coco.py + +++ w/samples/coco/coco.py + @@ -81,7 +81,10 @@ class CocoConfig(Config): + IMAGES_PER_GPU = 2 + + # Uncomment to train on 8 GPUs (default is 1) + - # GPU_COUNT = 8 + + GPU_COUNT = 8 + + BACKBONE = "resnet50" + + STEPS_PER_EPOCH = 50 + + TRAIN_ROIS_PER_IMAGE = 512 + + # Number of classes (including background) + NUM_CLASSES = 1 + 80 # COCO has 80 classes + @@ -496,29 +499,10 @@ if __name__ == '__main__': + # *** This training schedule is an example. Update to your needs *** + + # Training - Stage 1 + - print("Training network heads") + model.train(dataset_train, dataset_val, + learning_rate=config.LEARNING_RATE, + epochs=40, + - layers='heads', + - augmentation=augmentation) + - + - # Training - Stage 2 + - # Finetune layers from ResNet stage 4 and up + - print("Fine tune Resnet stage 4 and up") + - model.train(dataset_train, dataset_val, + - learning_rate=config.LEARNING_RATE, + - epochs=120, + - layers='4+', + - augmentation=augmentation) + - + - # Training - Stage 3 + - # Fine tune all layers + - print("Fine tune all layers") + - model.train(dataset_train, dataset_val, + - learning_rate=config.LEARNING_RATE / 10, + - epochs=160, + - layers='all', + + layers='3+', + augmentation=augmentation) + + elif args.command == "evaluate": + ``` + +
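For reference, the throughput metric defined under Settings above (average images per second over iterations 100-500, i.e. after a warmup period) can be computed with a small helper along these lines. This is only a sketch: `run_one_iteration` and `total_batch_size` are hypothetical stand-ins for a single training step and the global batch size.

```python
import time

def average_throughput(run_one_iteration, total_batch_size=16, warmup=100, measure=400):
    # Skip the first `warmup` iterations (GPU warmup), then time the next `measure` iterations.
    for _ in range(warmup):
        run_one_iteration()
    start = time.perf_counter()
    for _ in range(measure):
        run_one_iteration()
    elapsed = time.perf_counter() - start
    return measure * total_batch_size / elapsed  # images per second
```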
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/changelog.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/changelog.md new file mode 100644 index 0000000000000000000000000000000000000000..c0d4f5900bc64dbc4d2ce2d9bd31d32b9ee39f8f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/changelog.md @@ -0,0 +1,26 @@ +# Change Log + +### Releases +See release log at +[https://github.com/facebookresearch/detectron2/releases](https://github.com/facebookresearch/detectron2/releases). + +### Notable Backward Incompatible Changes: + +* 03/30/2020: Custom box head's `output_size` changed to `output_shape`. +* 02/14/2020,02/18/2020: Mask head and keypoint head now include logic for losses & inference. Custom heads + should overwrite the feature computation by `layers()` method. +* 11/11/2019: `detectron2.data.detection_utils.read_image` transposes images with exif information. + +### Config Version Change Log + +* v1: Rename `RPN_HEAD.NAME` to `RPN.HEAD_NAME`. +* v2: A batch of rename of many configurations before release. + +### Silent Regression in Historical Versions: + +We list a few silent regressions since they may silently produce incorrect results and will be hard to debug. + +* 04/01/2020 - 05/11/2020: Bad accuracy if `TRAIN_ON_PRED_BOXES` is set to True. +* 03/30/2020 - 04/01/2020: ResNets are not correctly built. +* 12/19/2019 - 12/26/2019: Using aspect ratio grouping causes a drop in accuracy. +* release - 11/9/2019: Test time augmentation does not predict the last category. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/compatibility.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/compatibility.md new file mode 100644 index 0000000000000000000000000000000000000000..f7b66c2e384b162864fb96a2fed44ba3084b8226 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/compatibility.md @@ -0,0 +1,83 @@ +# Compatibility with Other Libraries + +## Compatibility with Detectron (and maskrcnn-benchmark) + +Detectron2 addresses some legacy issues left in Detectron. As a result, their models +are not compatible: +running inference with the same model weights will produce different results in the two code bases. + +The major differences regarding inference are: + +- The height and width of a box with corners (x1, y1) and (x2, y2) is now computed more naturally as + width = x2 - x1 and height = y2 - y1; + In Detectron, a "+ 1" was added both height and width. + + Note that the relevant ops in Caffe2 have [adopted this change of convention](https://github.com/pytorch/pytorch/pull/20550) + with an extra option. + So it is still possible to run inference with a Detectron2-trained model in Caffe2. + + The change in height/width calculations most notably changes: + - encoding/decoding in bounding box regression. + - non-maximum suppression. The effect here is very negligible, though. + +- RPN now uses simpler anchors with fewer quantization artifacts. + + In Detectron, the anchors were quantized and + [do not have accurate areas](https://github.com/facebookresearch/Detectron/issues/227). + In Detectron2, the anchors are center-aligned to feature grid points and not quantized. + +- Classification layers have a different ordering of class labels. + + This involves any trainable parameter with shape (..., num_categories + 1, ...). 
+ In Detectron2, integer labels [0, K-1] correspond to the K = num_categories object categories + and the label "K" corresponds to the special "background" category. + In Detectron, label "0" means background, and labels [1, K] correspond to the K categories. + +- ROIAlign is implemented differently. The new implementation is [available in Caffe2](https://github.com/pytorch/pytorch/pull/23706). + + 1. All the ROIs are shifted by half a pixel compared to Detectron in order to create better image-feature-map alignment. + See `layers/roi_align.py` for details. + To enable the old behavior, use `ROIAlign(aligned=False)`, or `POOLER_TYPE=ROIAlign` instead of + `ROIAlignV2` (the default). + + 1. The ROIs are not required to have a minimum size of 1. + This will lead to tiny differences in the output, but should be negligible. + +- Mask inference function is different. + + In Detectron2, the "paste_mask" function is different and should be more accurate than in Detectron. This change + can improve mask AP on COCO by ~0.5% absolute. + +There are some other differences in training as well, but they won't affect +model-level compatibility. The major ones are: + +- We fixed a [bug](https://github.com/facebookresearch/Detectron/issues/459) in + Detectron, by making `RPN.POST_NMS_TOPK_TRAIN` per-image, rather than per-batch. + The fix may lead to a small accuracy drop for a few models (e.g. keypoint + detection) and will require some parameter tuning to match the Detectron results. +- For simplicity, we change the default loss in bounding box regression to L1 loss, instead of smooth L1 loss. + We have observed that this tends to slightly decrease box AP50 while improving box AP for higher + overlap thresholds (and leading to a slight overall improvement in box AP). +- We interpret the coordinates in COCO bounding box and segmentation annotations + as coordinates in range `[0, width]` or `[0, height]`. The coordinates in + COCO keypoint annotations are interpreted as pixel indices in range `[0, width - 1]` or `[0, height - 1]`. + Note that this affects how flip augmentation is implemented. + + +We will later share more details and rationale behind the above mentioned issues +about pixels, coordinates, and "+1"s. + + +## Compatibility with Caffe2 + +As mentioned above, despite the incompatibilities with Detectron, the relevant +ops have been implemented in Caffe2. +Therefore, models trained with detectron2 can be converted in Caffe2. +See [Deployment](../tutorials/deployment.md) for the tutorial. + +## Compatibility with TensorFlow + +Most ops are available in TensorFlow, although some tiny differences in +the implementation of resize / ROIAlign / padding need to be addressed. +A working conversion script is provided by [tensorpack FasterRCNN](https://github.com/tensorpack/tensorpack/tree/master/examples/FasterRCNN/convert_d2) +to run a standard detectron2 model in TensorFlow. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/contributing.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/contributing.md new file mode 100644 index 0000000000000000000000000000000000000000..81936dfedb495dd5cd21da2bfcf9819b97ed1dff --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/contributing.md @@ -0,0 +1,49 @@ +# Contributing to detectron2 + +## Issues +We use GitHub issues to track public bugs and questions. 
+Please make sure to follow one of the +[issue templates](https://github.com/facebookresearch/detectron2/issues/new/choose) +when reporting any issues. + +Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe +disclosure of security bugs. In those cases, please go through the process +outlined on that page and do not file a public issue. + +## Pull Requests +We actively welcome your pull requests. + +However, if you're adding any significant features (e.g. > 50 lines), please +make sure to have a corresponding issue to discuss your motivation and proposals, +before sending a PR. We do not always accept new features, and we take the following +factors into consideration: + +1. Whether the same feature can be achieved without modifying detectron2. +Detectron2 is designed so that you can implement many extensions from the outside, e.g. +those in [projects](https://github.com/facebookresearch/detectron2/tree/master/projects). +If some part is not as extensible, you can also bring up the issue to make it more extensible. +2. Whether the feature is potentially useful to a large audience, or only to a small portion of users. +3. Whether the proposed solution has a good design / interface. +4. Whether the proposed solution adds extra mental/practical overhead to users who don't + need such feature. +5. Whether the proposed solution breaks existing APIs. + +When sending a PR, please do: + +1. If a PR contains multiple orthogonal changes, split it to several PRs. +2. If you've added code that should be tested, add tests. +3. For PRs that need experiments (e.g. adding a new model or new methods), + you don't need to update model zoo, but do provide experiment results in the description of the PR. +4. If APIs are changed, update the documentation. +5. Make sure your code lints with `./dev/linter.sh`. + + +## Contributor License Agreement ("CLA") +In order to accept your pull request, we need you to submit a CLA. You only need +to do this once to work on any of Facebook's open source projects. + +Complete your CLA here: + +## License +By contributing to detectron2, you agree that your contributions will be licensed +under the LICENSE file in the root directory of this source tree. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/index.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..63cf907be7bb15f5316af6d44a46df601755a86b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/notes/index.rst @@ -0,0 +1,10 @@ +Notes +====================================== + +.. toctree:: + :maxdepth: 2 + + benchmarks + compatibility + contributing + changelog diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1ca9c94d042ef838143a45490fe6b4556c19f3c9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/README.md @@ -0,0 +1,4 @@ +# Read the docs: + +The latest documentation built from this directory is available at [detectron2.readthedocs.io](https://detectron2.readthedocs.io/). +Documents in this directory are not meant to be read on github. 
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/builtin_datasets.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/builtin_datasets.md new file mode 100644 index 0000000000000000000000000000000000000000..1a2633f95e6f6a5e54c8beca102a490036478587 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/builtin_datasets.md @@ -0,0 +1,99 @@ +# Setup Builtin Datasets + +Detectron2 has builtin support for a few datasets. +The datasets are assumed to exist in a directory specified by the environment variable +`DETECTRON2_DATASETS`. +Under this directory, detectron2 expects to find datasets in the structure described below. + +You can set the location for builtin datasets by `export DETECTRON2_DATASETS=/path/to/datasets`. +If left unset, the default is `./datasets` relative to your current working directory. + +The [model zoo](https://github.com/facebookresearch/detectron2/blob/master/MODEL_ZOO.md) +contains configs and models that use these builtin datasets. + +## Expected dataset structure for COCO instance/keypoint detection: + +``` +coco/ + annotations/ + instances_{train,val}2017.json + person_keypoints_{train,val}2017.json + {train,val}2017/ + # image files that are mentioned in the corresponding json +``` + +You can use the 2014 version of the dataset as well. + +Some of the builtin tests (`dev/run_*_tests.sh`) use a tiny version of the COCO dataset, +which you can download with `./prepare_for_tests.sh`. + +## Expected dataset structure for PanopticFPN: + +``` +coco/ + annotations/ + panoptic_{train,val}2017.json + panoptic_{train,val}2017/ # png annotations + panoptic_stuff_{train,val}2017/ # generated by the script mentioned below +``` + +Install panopticapi by: +``` +pip install git+https://github.com/cocodataset/panopticapi.git +``` +Then, run `python prepare_panoptic_fpn.py` to extract semantic annotations from panoptic annotations. + +## Expected dataset structure for LVIS instance segmentation: +``` +coco/ + {train,val,test}2017/ +lvis/ + lvis_v0.5_{train,val}.json + lvis_v0.5_image_info_test.json +``` + +Install lvis-api by: +``` +pip install git+https://github.com/lvis-dataset/lvis-api.git +``` + +Run `python prepare_cocofied_lvis.py` to prepare "cocofied" LVIS annotations for evaluation of models trained on the COCO dataset. + +## Expected dataset structure for cityscapes: +``` +cityscapes/ + gtFine/ + train/ + aachen/ + color.png, instanceIds.png, labelIds.png, polygons.json, + labelTrainIds.png + ... + val/ + test/ + leftImg8bit/ + train/ + val/ + test/ +``` +Install the cityscapes scripts by: +``` +pip install git+https://github.com/mcordts/cityscapesScripts.git +``` + +Note: labelTrainIds.png are created using cityscapesScripts with: +``` +CITYSCAPES_DATASET=$DETECTRON2_DATASETS/cityscapes python cityscapesscripts/preparation/createTrainIdLabelImgs.py +``` +They are not needed for instance segmentation.
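Once one of the layouts above is in place and `DETECTRON2_DATASETS` points at it, a quick sanity check is to load a registered builtin dataset by name. The snippet below is a minimal sketch that assumes the COCO 2017 validation set has been downloaded as described earlier; the datasets path is a hypothetical placeholder.

```python
import os

# Must be set before detectron2.data is imported, because the builtin datasets
# are registered relative to this root at import time.
os.environ["DETECTRON2_DATASETS"] = "/path/to/datasets"  # hypothetical location

from detectron2.data import DatasetCatalog, MetadataCatalog

dicts = DatasetCatalog.get("coco_2017_val")  # loads the annotations as list[dict]
print(len(dicts), "images,", len(MetadataCatalog.get("coco_2017_val").thing_classes), "classes")
```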
+ +## Expected dataset structure for Pascal VOC: +``` +VOC20{07,12}/ + Annotations/ + ImageSets/ + Main/ + trainval.txt + test.txt + # train.txt or val.txt, if you use these splits + JPEGImages/ +``` diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/configs.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/configs.md new file mode 100644 index 0000000000000000000000000000000000000000..ea82583825b51955993ca87d14c17ffb3ab031f4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/configs.md @@ -0,0 +1,58 @@ +# Configs + +Detectron2 provides a key-value based config system that can be +used to obtain standard, common behaviors. + +Detectron2's config system uses YAML and [yacs](https://github.com/rbgirshick/yacs). +In addition to the [basic operations](../modules/config.html#detectron2.config.CfgNode) +that access and update a config, we provide the following extra functionalities: + +1. The config can have `_BASE_: base.yaml` field, which will load a base config first. + Values in the base config will be overwritten in sub-configs, if there are any conflicts. + We provided several base configs for standard model architectures. +2. We provide config versioning, for backward compatibility. + If your config file is versioned with a config line like `VERSION: 2`, + detectron2 will still recognize it even if we change some keys in the future. + +"Config" is a very limited abstraction. +We do not expect all features in detectron2 to be available through configs. +If you need something that's not available in the config space, +please write code using detectron2's API. + +### Basic Usage + +Some basic usage of the `CfgNode` object is shown here. See more in [documentation](../modules/config.html#detectron2.config.CfgNode). +```python +from detectron2.config import get_cfg +cfg = get_cfg() # obtain detectron2's default config +cfg.xxx = yyy # add new configs for your own custom components +cfg.merge_from_file("my_cfg.yaml") # load values from a file + +cfg.merge_from_list(["MODEL.WEIGHTS", "weights.pth"]) # can also load values from a list of str +print(cfg.dump()) # print formatted configs +``` + +Many builtin tools in detectron2 accepts command line config overwrite: +Key-value pairs provided in the command line will overwrite the existing values in the config file. +For example, [demo.py](../../demo/demo.py) can be used with +``` +./demo.py --config-file config.yaml [--other-options] \ + --opts MODEL.WEIGHTS /path/to/weights INPUT.MIN_SIZE_TEST 1000 +``` + +To see a list of available configs in detectron2 and what they mean, +check [Config References](../modules/config.html#config-references) + + +### Best Practice with Configs + +1. Treat the configs you write as "code": avoid copying them or duplicating them; use `_BASE_` + to share common parts between configs. + +2. Keep the configs you write simple: don't include keys that do not affect the experimental setting. + +3. Keep a version number in your configs (or the base config), e.g., `VERSION: 2`, + for backward compatibility. + We print a warning when reading a config without version number. + The official configs do not include version number because they are meant to + be always up-to-date. 
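To make the `_BASE_` inheritance and `--opts`-style overrides described above concrete, here is a minimal sketch; it assumes detectron2 is installed, and the two YAML file names are hypothetical.

```python
from detectron2.config import get_cfg

# Write a tiny base config and a child config that inherits from it.
with open("base.yaml", "w") as f:
    f.write("MODEL:\n  MASK_ON: True\n")
with open("my_cfg.yaml", "w") as f:
    f.write('_BASE_: "base.yaml"\nSOLVER:\n  BASE_LR: 0.01\n')

cfg = get_cfg()                     # detectron2's default config
cfg.merge_from_file("my_cfg.yaml")  # loads base.yaml first, then applies the child's values
cfg.merge_from_list(["SOLVER.MAX_ITER", "90000"])  # same mechanism as --opts on the command line
print(cfg.MODEL.MASK_ON, cfg.SOLVER.BASE_LR, cfg.SOLVER.MAX_ITER)
```

As noted in the best practices above, detectron2 may print a warning because these sketch configs carry no `VERSION` key.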
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/data_loading.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/data_loading.md new file mode 100644 index 0000000000000000000000000000000000000000..bb037ca534ccbb0cf82c456d0cd54544520b3a3f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/data_loading.md @@ -0,0 +1,77 @@ + +# Use Custom Dataloaders + +## How the Existing Dataloader Works + +Detectron2 contains a builtin data loading pipeline. +It's good to understand how it works, in case you need to write a custom one. + +Detectron2 provides two functions +[build_detection_{train,test}_loader](../modules/data.html#detectron2.data.build_detection_train_loader) +that create a default data loader from a given config. +Here is how `build_detection_{train,test}_loader` work: + +1. It takes the name of a registered dataset (e.g., "coco_2017_train") and loads a `list[dict]` representing the dataset items + in a lightweight, canonical format. These dataset items are not yet ready to be used by the model (e.g., images are + not loaded into memory, random augmentations have not been applied, etc.). + Details about the dataset format and dataset registration can be found in + [datasets](./datasets.md). +2. Each dict in this list is mapped by a function ("mapper"): + * Users can customize this mapping function by specifying the "mapper" argument in + `build_detection_{train,test}_loader`. The default mapper is [DatasetMapper](../modules/data.html#detectron2.data.DatasetMapper). + * The output format of such function can be arbitrary, as long as it is accepted by the consumer of this data loader (usually the model). + The outputs of the default mapper, after batching, follow the default model input format documented in + [Use Models](./models.html#model-input-format). + * The role of the mapper is to transform the lightweight, canonical representation of a dataset item into a format + that is ready for the model to consume (including, e.g., read images, perform random data augmentation and convert to torch Tensors). + If you would like to perform custom transformations to data, you often want a custom mapper. +3. The outputs of the mapper are batched (simply into a list). +4. This batched data is the output of the data loader. Typically, it's also the input of + `model.forward()`. + + +## Write a Custom Dataloader + +Using a different "mapper" with `build_detection_{train,test}_loader(mapper=)` works for most use cases +of custom data loading. 
+For example, if you want to resize all images to a fixed size for Mask R-CNN training, write this: + +```python +import copy +import torch + +from detectron2.data import build_detection_train_loader +from detectron2.data import transforms as T +from detectron2.data import detection_utils as utils + +def mapper(dataset_dict): + # Implement a mapper, similar to the default DatasetMapper, but with your own customizations + dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below + image = utils.read_image(dataset_dict["file_name"], format="BGR") + image, transforms = T.apply_transform_gens([T.Resize((800, 800))], image) + dataset_dict["image"] = torch.as_tensor(image.transpose(2, 0, 1).astype("float32")) + + annos = [ + utils.transform_instance_annotations(obj, transforms, image.shape[:2]) + for obj in dataset_dict.pop("annotations") + if obj.get("iscrowd", 0) == 0 + ] + instances = utils.annotations_to_instances(annos, image.shape[:2]) + dataset_dict["instances"] = utils.filter_empty_instances(instances) + return dataset_dict + +data_loader = build_detection_train_loader(cfg, mapper=mapper) +# use this dataloader instead of the default +``` +Refer to [API documentation of detectron2.data](../modules/data) for details. + +If you want to change not only the mapper (e.g., to write different sampling or batching logic), +you can write your own data loader. The data loader is simply a +Python iterator that produces [the format](./models.md) your model accepts. +You can implement it using any tools you like. + +## Use a Custom Dataloader + +If you use [DefaultTrainer](../modules/engine.html#detectron2.engine.defaults.DefaultTrainer), +you can overwrite its `build_{train,test}_loader` method to use your own dataloader. +See the [densepose dataloader](../../projects/DensePose/train_net.py) +for an example. + +If you write your own training loop, you can plug in your data loader easily. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/datasets.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/datasets.md new file mode 100644 index 0000000000000000000000000000000000000000..8dc1c0c55598887e4de73e988567753ebf4538e2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/datasets.md @@ -0,0 +1,221 @@ +# Use Custom Datasets + +Datasets that have builtin support in detectron2 are listed in [datasets](../../datasets). +If you want to use a custom dataset while also reusing detectron2's data loaders, +you will need to + +1. __Register__ your dataset (i.e., tell detectron2 how to obtain your dataset). +2. Optionally, __register metadata__ for your dataset. + +Next, we explain the above two concepts in detail. + +The [Colab tutorial](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5) +has a live example of how to register and train on a dataset of custom formats. + +### Register a Dataset + +To let detectron2 know how to obtain a dataset named "my_dataset", you will implement +a function that returns the items in your dataset and then tell detectron2 about this +function: +```python +def my_dataset_function(): + ... + return list[dict] in the following format + +from detectron2.data import DatasetCatalog +DatasetCatalog.register("my_dataset", my_dataset_function) +``` + +Here, the snippet associates a dataset "my_dataset" with a function that returns the data. +The registration stays effective until the process exits.
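For concreteness, below is a minimal sketch of such a dataset function that returns the standard dataset dicts described next; the image path, size, and class label are hypothetical placeholders.

```python
# Sketch: register a one-image dataset in detectron2's standard dict format.
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.structures import BoxMode

def my_dataset_function():
    return [
        {
            "file_name": "/path/to/images/0001.jpg",  # hypothetical image path
            "image_id": 0,
            "height": 480,
            "width": 640,
            "annotations": [
                {
                    "bbox": [100.0, 120.0, 300.0, 400.0],  # one box in absolute XYXY pixels
                    "bbox_mode": BoxMode.XYXY_ABS,
                    "category_id": 0,
                }
            ],
        }
    ]

DatasetCatalog.register("my_dataset", my_dataset_function)
MetadataCatalog.get("my_dataset").thing_classes = ["person"]  # optional metadata, see below
```

After registration, "my_dataset" can be referenced in `cfg.DATASETS.{TRAIN,TEST}` like any builtin dataset.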
+ +The function can processes data from its original format into either one of the following: +1. Detectron2's standard dataset dict, described below. This will work with many other builtin + features in detectron2, so it's recommended to use it when it's sufficient for your task. +2. Your custom dataset dict. You can also return arbitrary dicts in your own format, + such as adding extra keys for new tasks. + Then you will need to handle them properly downstream as well. + See below for more details. + +#### Standard Dataset Dicts + +For standard tasks +(instance detection, instance/semantic/panoptic segmentation, keypoint detection), +we load the original dataset into `list[dict]` with a specification similar to COCO's json annotations. +This is our standard representation for a dataset. + +Each dict contains information about one image. +The dict may have the following fields, +and the required fields vary based on what the dataloader or the task needs (see more below). + ++ `file_name`: the full path to the image file. Will apply rotation and flipping if the image has such exif information. ++ `height`, `width`: integer. The shape of image. ++ `image_id` (str or int): a unique id that identifies this image. Used + during evaluation to identify the images, but a dataset may use it for different purposes. ++ `annotations` (list[dict]): each dict corresponds to annotations of one instance + in this image. Required by instance detection/segmentation or keypoint detection tasks. + + Images with empty `annotations` will by default be removed from training, + but can be included using `DATALOADER.FILTER_EMPTY_ANNOTATIONS`. + + Each dict contains the following keys, of which `bbox`,`bbox_mode` and `category_id` are required: + + `bbox` (list[float]): list of 4 numbers representing the bounding box of the instance. + + `bbox_mode` (int): the format of bbox. + It must be a member of + [structures.BoxMode](../modules/structures.html#detectron2.structures.BoxMode). + Currently supports: `BoxMode.XYXY_ABS`, `BoxMode.XYWH_ABS`. + + `category_id` (int): an integer in the range [0, num_categories) representing the category label. + The value num_categories is reserved to represent the "background" category, if applicable. + + `segmentation` (list[list[float]] or dict): the segmentation mask of the instance. + + If `list[list[float]]`, it represents a list of polygons, one for each connected component + of the object. Each `list[float]` is one simple polygon in the format of `[x1, y1, ..., xn, yn]`. + The Xs and Ys are either relative coordinates in [0, 1], or absolute coordinates, + depend on whether "bbox_mode" is relative. + + If `dict`, it represents the per-pixel segmentation mask in COCO's RLE format. The dict should have + keys "size" and "counts". You can convert a uint8 segmentation mask of 0s and 1s into + RLE format by `pycocotools.mask.encode(np.asarray(mask, order="F"))`. + + `keypoints` (list[float]): in the format of [x1, y1, v1,..., xn, yn, vn]. + v[i] means the [visibility](http://cocodataset.org/#format-data) of this keypoint. + `n` must be equal to the number of keypoint categories. + The Xs and Ys are either relative coordinates in [0, 1], or absolute coordinates, + depend on whether "bbox_mode" is relative. + + Note that the coordinate annotations in COCO format are integers in range [0, H-1 or W-1]. + By default, detectron2 adds 0.5 to absolute keypoint coordinates to convert them from discrete + pixel indices to floating point coordinates. + + `iscrowd`: 0 (default) or 1. 
Whether this instance is labeled as COCO's "crowd + region". Don't include this field if you don't know what it means. ++ `sem_seg_file_name`: the full path to the ground truth semantic segmentation file. + Required by semantic segmentation task. + It should be an image whose pixel values are integer labels. + + +Fast R-CNN (with precomputed proposals) is rarely used today. +To train a Fast R-CNN, the following extra keys are needed: + ++ `proposal_boxes` (array): 2D numpy array with shape (K, 4) representing K precomputed proposal boxes for this image. ++ `proposal_objectness_logits` (array): numpy array with shape (K, ), which corresponds to the objectness + logits of proposals in 'proposal_boxes'. ++ `proposal_bbox_mode` (int): the format of the precomputed proposal bbox. + It must be a member of + [structures.BoxMode](../modules/structures.html#detectron2.structures.BoxMode). + Default is `BoxMode.XYXY_ABS`. + +#### Custom Dataset Dicts for New Tasks + +In the `list[dict]` that your dataset function returns, the dictionary can also have arbitrary custom data. +This will be useful for a new task that needs extra information not supported +by the standard dataset dicts. In this case, you need to make sure the downstream code can handle your data +correctly. Usually this requires writing a new `mapper` for the dataloader (see [Use Custom Dataloaders](./data_loading.md)). + +When designing a custom format, note that all dicts are stored in memory +(sometimes serialized and with multiple copies). +To save memory, each dict is meant to contain small but sufficient information +about each sample, such as file names and annotations. +Loading full samples typically happens in the data loader. + +For attributes shared among the entire dataset, use `Metadata` (see below). +To avoid extra memory, do not save such information repeatly for each sample. + +### "Metadata" for Datasets + +Each dataset is associated with some metadata, accessible through +`MetadataCatalog.get(dataset_name).some_metadata`. +Metadata is a key-value mapping that contains information that's shared among +the entire dataset, and usually is used to interpret what's in the dataset, e.g., +names of classes, colors of classes, root of files, etc. +This information will be useful for augmentation, evaluation, visualization, logging, etc. +The structure of metadata depends on the what is needed from the corresponding downstream code. + +If you register a new dataset through `DatasetCatalog.register`, +you may also want to add its corresponding metadata through +`MetadataCatalog.get(dataset_name).some_key = some_value`, to enable any features that need the metadata. +You can do it like this (using the metadata key "thing_classes" as an example): + +```python +from detectron2.data import MetadataCatalog +MetadataCatalog.get("my_dataset").thing_classes = ["person", "dog"] +``` + +Here is a list of metadata keys that are used by builtin features in detectron2. +If you add your own dataset without these metadata, some features may be +unavailable to you: + +* `thing_classes` (list[str]): Used by all instance detection/segmentation tasks. + A list of names for each instance/thing category. + If you load a COCO format dataset, it will be automatically set by the function `load_coco_json`. + +* `thing_colors` (list[tuple(r, g, b)]): Pre-defined color (in [0, 255]) for each thing category. + Used for visualization. If not given, random colors are used. + +* `stuff_classes` (list[str]): Used by semantic and panoptic segmentation tasks. 
+ A list of names for each stuff category. + +* `stuff_colors` (list[tuple(r, g, b)]): Pre-defined color (in [0, 255]) for each stuff category. + Used for visualization. If not given, random colors are used. + +* `keypoint_names` (list[str]): Used by keypoint localization. A list of names for each keypoint. + +* `keypoint_flip_map` (list[tuple[str]]): Used by the keypoint localization task. A list of pairs of names, + where each pair are the two keypoints that should be flipped if the image is + flipped horizontally during augmentation. +* `keypoint_connection_rules`: list[tuple(str, str, (r, g, b))]. Each tuple specifies a pair of keypoints + that are connected and the color to use for the line between them when visualized. + +Some additional metadata that are specific to the evaluation of certain datasets (e.g. COCO): + +* `thing_dataset_id_to_contiguous_id` (dict[int->int]): Used by all instance detection/segmentation tasks in the COCO format. + A mapping from instance class ids in the dataset to contiguous ids in range [0, #class). + Will be automatically set by the function `load_coco_json`. + +* `stuff_dataset_id_to_contiguous_id` (dict[int->int]): Used when generating prediction json files for + semantic/panoptic segmentation. + A mapping from semantic segmentation class ids in the dataset + to contiguous ids in [0, num_categories). It is useful for evaluation only. + +* `json_file`: The COCO annotation json file. Used by COCO evaluation for COCO-format datasets. +* `panoptic_root`, `panoptic_json`: Used by panoptic evaluation. +* `evaluator_type`: Used by the builtin main training script to select + evaluator. Don't use it in a new training script. + You can just provide the [DatasetEvaluator](../modules/evaluation.html#detectron2.evaluation.DatasetEvaluator) + for your dataset directly in your main script. + +NOTE: For background on the concept of "thing" and "stuff", see +[On Seeing Stuff: The Perception of Materials by Humans and Machines](http://persci.mit.edu/pub_pdfs/adelson_spie_01.pdf). +In detectron2, the term "thing" is used for instance-level tasks, +and "stuff" is used for semantic segmentation tasks. +Both are used in panoptic segmentation. + +### Register a COCO Format Dataset + +If your dataset is already a json file in the COCO format, +the dataset and its associated metadata can be registered easily with: +```python +from detectron2.data.datasets import register_coco_instances +register_coco_instances("my_dataset", {}, "json_annotation.json", "path/to/image/dir") +``` + +If your dataset is in COCO format but with extra custom per-instance annotations, +the [load_coco_json](../modules/data.html#detectron2.data.datasets.load_coco_json) +function might be useful. + +### Update the Config for New Datasets + +Once you've registered the dataset, you can use the name of the dataset (e.g., "my_dataset" in +example above) in `cfg.DATASETS.{TRAIN,TEST}`. +There are other configs you might want to change to train or evaluate on new datasets: + +* `MODEL.ROI_HEADS.NUM_CLASSES` and `MODEL.RETINANET.NUM_CLASSES` are the number of thing classes + for R-CNN and RetinaNet models, respectively. +* `MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS` sets the number of keypoints for Keypoint R-CNN. + You'll also need to set [Keypoint OKS](http://cocodataset.org/#keypoints-eval) + with `TEST.KEYPOINT_OKS_SIGMAS` for evaluation. +* `MODEL.SEM_SEG_HEAD.NUM_CLASSES` sets the number of stuff classes for Semantic FPN & Panoptic FPN. 
+* If you're training Fast R-CNN (with precomputed proposals), `DATASETS.PROPOSAL_FILES_{TRAIN,TEST}` + need to match the datasets. The format of proposal files are documented + [here](../modules/data.html#detectron2.data.load_proposals_into_dataset). + +New models +(e.g. [TensorMask](../../projects/TensorMask), +[PointRend](../../projects/PointRend)) +often have similar configs of their own that need to be changed as well. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/deployment.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/deployment.md new file mode 100644 index 0000000000000000000000000000000000000000..a473247abf7df74e35b6de71c018f1aa34eaf435 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/deployment.md @@ -0,0 +1,92 @@ +# Deployment + +## Caffe2 Deployment +We currently support converting a detectron2 model to Caffe2 format through ONNX. +The converted Caffe2 model is able to run without detectron2 dependency in either Python or C++. +It has a runtime optimized for CPU & mobile inference, but not for GPU inference. + +Caffe2 conversion requires PyTorch ≥ 1.4 and ONNX ≥ 1.6. + +### Coverage + +It supports 3 most common meta architectures: `GeneralizedRCNN`, `RetinaNet`, `PanopticFPN`, +and most official models under these 3 meta architectures. + +Users' custom extensions under these architectures (added through registration) are supported +as long as they do not contain control flow or operators not available in Caffe2 (e.g. deformable convolution). +For example, custom backbones and heads are often supported out of the box. + +### Usage + +The conversion APIs are documented at [the API documentation](../modules/export). +We provide a tool, `caffe2_converter.py` as an example that uses +these APIs to convert a standard model. + +To convert an official Mask R-CNN trained on COCO, first +[prepare the COCO dataset](../../datasets/), then pick the model from [Model Zoo](../../MODEL_ZOO.md), and run: +``` +cd tools/deploy/ && ./caffe2_converter.py --config-file ../../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml \ + --output ./caffe2_model --run-eval \ + MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl \ + MODEL.DEVICE cpu +``` + +Note that: +1. The conversion needs valid sample inputs & weights to trace the model. That's why the script requires the dataset. + You can modify the script to obtain sample inputs in other ways. +2. With the `--run-eval` flag, it will evaluate the converted models to verify its accuracy. + The accuracy is typically slightly different (within 0.1 AP) from PyTorch due to + numerical precisions between different implementations. + It's recommended to always verify the accuracy in case your custom model is not supported by the + conversion. + +The converted model is available at the specified `caffe2_model/` directory. Two files `model.pb` +and `model_init.pb` that contain network structure and network parameters are necessary for deployment. +These files can then be loaded in C++ or Python using Caffe2's APIs. + +The script generates `model.svg` file which contains a visualization of the network. +You can also load `model.pb` to tools such as [netron](https://github.com/lutzroeder/netron) to visualize it. + +### Use the model in C++/Python + +The model can be loaded in C++. 
An example [caffe2_mask_rcnn.cpp](../../tools/deploy/) is given, +which performs CPU/GPU inference using `COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x`. + +The C++ example needs to be built with: +* PyTorch with caffe2 inside +* gflags, glog, opencv +* protobuf headers that match the version of your caffe2 +* MKL headers if caffe2 is built with MKL + +The following can compile the example inside [official detectron2 docker](../../docker/): +``` +sudo apt update && sudo apt install libgflags-dev libgoogle-glog-dev libopencv-dev +pip install mkl-include +wget https://github.com/protocolbuffers/protobuf/releases/download/v3.6.1/protobuf-cpp-3.6.1.tar.gz +tar xf protobuf-cpp-3.6.1.tar.gz +export CPATH=$(readlink -f ./protobuf-3.6.1/src/):$HOME/.local/include +export CMAKE_PREFIX_PATH=$HOME/.local/lib/python3.6/site-packages/torch/ +mkdir build && cd build +cmake -DTORCH_CUDA_ARCH_LIST=$TORCH_CUDA_ARCH_LIST .. && make + +# To run: +./caffe2_mask_rcnn --predict_net=./model.pb --init_net=./model_init.pb --input=input.jpg +``` + +Note that: + +* All converted models (the .pb files) take two input tensors: + "data" is an NCHW image, and "im_info" is an Nx3 tensor consisting of (height, width, 1.0) for + each image (the shape of "data" might be larger than that in "im_info" due to padding). + +* The converted models do not contain post-processing operations that + transform raw layer outputs into formatted predictions. + The example only produces raw outputs (28x28 masks) from the final + layers that are not post-processed, because in actual deployment, an application often needs + its custom lightweight post-processing (e.g. full-image masks for every detected object is often not necessary). + +We also provide a python wrapper around the converted model, in the +[Caffe2Model.\_\_call\_\_](../modules/export.html#detectron2.export.Caffe2Model.__call__) method. +This method has an interface that's identical to the [pytorch versions of models](./models.md), +and it internally applies pre/post-processing code to match the formats. +They can serve as a reference for pre/post-processing in actual deployment. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/evaluation.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/evaluation.md new file mode 100644 index 0000000000000000000000000000000000000000..c71adb7eb2e554e5ea848f1feb44bbee01a13f8e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/evaluation.md @@ -0,0 +1,43 @@ + +# Evaluation + +Evaluation is a process that takes a number of inputs/outputs pairs and aggregate them. +You can always [use the model](./models.md) directly and just parse its inputs/outputs manually to perform +evaluation. +Alternatively, evaluation is implemented in detectron2 using the [DatasetEvaluator](../modules/evaluation.html#detectron2.evaluation.DatasetEvaluator) +interface. + +Detectron2 includes a few `DatasetEvaluator` that computes metrics using standard dataset-specific +APIs (e.g., COCO, LVIS). +You can also implement your own `DatasetEvaluator` that performs some other jobs +using the inputs/outputs pairs. +For example, to count how many instances are detected on the validation set: + +``` +class Counter(DatasetEvaluator): + def reset(self): + self.count = 0 + def process(self, inputs, outputs): + for output in outputs: + self.count += len(output["instances"]) + def evaluate(self): + # save self.count somewhere, or print it, or return it. 
+ return {"count": self.count} +``` + +Once you have some `DatasetEvaluator`, you can run it with +[inference_on_dataset](../modules/evaluation.html#detectron2.evaluation.inference_on_dataset). +For example, + +```python +val_results = inference_on_dataset( + model, + val_data_loader, + DatasetEvaluators([COCOEvaluator(...), Counter()])) +``` +Compared to running the evaluation manually using the model, the benefit of this function is that +you can merge evaluators together using [DatasetEvaluators](../modules/evaluation.html#detectron2.evaluation.DatasetEvaluators). +In this way you can run all evaluations without having to go through the dataset multiple times. + +The `inference_on_dataset` function also provides accurate speed benchmarks for the +given model and dataset. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/extend.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/extend.md new file mode 100644 index 0000000000000000000000000000000000000000..4232185757139e45078bf58c4f0fffb5fa0e4c04 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/extend.md @@ -0,0 +1,53 @@ +# Extend Detectron2's Defaults + +__Research is about doing things in new ways__. +This brings a tension in how to create abstractions in code, +which is a challenge for any research engineering project of a significant size: + +1. On one hand, it needs to have very thin abstractions to allow for the possibility of doing + everything in new ways. It should be reasonably easy to break existing + abstractions and replace them with new ones. + +2. On the other hand, such a project also needs reasonably high-level + abstractions, so that users can easily do things in standard ways, + without worrying too much about the details that only certain researchers care about. + +In detectron2, there are two types of interfaces that address this tension together: + +1. Functions and classes that take a config (`cfg`) argument + (sometimes with only a few extra arguments). + + Such functions and classes implement + the "standard default" behavior: it will read what it needs from the + config and do the "standard" thing. + Users only need to load a given config and pass it around, without having to worry about + which arguments are used and what they all mean. + +2. Functions and classes that have well-defined explicit arguments. + + Each of these is a small building block of the entire system. + They require users' expertise to understand what each argument should be, + and require more effort to stitch together to a larger system. + But they can be stitched together in more flexible ways. + + When you need to implement something not supported by the "standard defaults" + included in detectron2, these well-defined components can be reused. + +3. (experimental) A few classes are implemented with the + [@configurable](../../modules/config.html#detectron2.config.configurable) + decorator - they can be called with either a config, or with explicit arguments. + Their explicit argument interfaces are currently __experimental__ and subject to change. + + +If you only need the standard behavior, the [Beginner's Tutorial](./getting_started.md) +should suffice. If you need to extend detectron2 to your own needs, +see the following tutorials for more details: + +* Detectron2 includes a few standard datasets. To use custom ones, see + [Use Custom Datasets](./datasets.md). 
+* Detectron2 contains the standard logic that creates a data loader for training/testing from a + dataset, but you can write your own as well. See [Use Custom Data Loaders](./data_loading.md). +* Detectron2 implements many standard detection models, and provide ways for you + to overwrite their behaviors. See [Use Models](./models.md) and [Write Models](./write-models.md). +* Detectron2 provides a default training loop that is good for common training tasks. + You can customize it with hooks, or write your own loop instead. See [training](./training.md). diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/getting_started.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/getting_started.md new file mode 100644 index 0000000000000000000000000000000000000000..acaf13f02c906b45ffc2f49ee5a0ce01d82b4786 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/getting_started.md @@ -0,0 +1,79 @@ +## Getting Started with Detectron2 + +This document provides a brief intro of the usage of builtin command-line tools in detectron2. + +For a tutorial that involves actual coding with the API, +see our [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5) +which covers how to run inference with an +existing model, and how to train a builtin model on a custom dataset. + +For more advanced tutorials, refer to our [documentation](https://detectron2.readthedocs.io/tutorials/extend.html). + + +### Inference Demo with Pre-trained Models + +1. Pick a model and its config file from + [model zoo](MODEL_ZOO.md), + for example, `mask_rcnn_R_50_FPN_3x.yaml`. +2. We provide `demo.py` that is able to run builtin standard models. Run it with: +``` +cd demo/ +python demo.py --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml \ + --input input1.jpg input2.jpg \ + [--other-options] + --opts MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl +``` +The configs are made for training, therefore we need to specify `MODEL.WEIGHTS` to a model from model zoo for evaluation. +This command will run the inference and show visualizations in an OpenCV window. + +For details of the command line arguments, see `demo.py -h` or look at its source code +to understand its behavior. Some common arguments are: +* To run __on your webcam__, replace `--input files` with `--webcam`. +* To run __on a video__, replace `--input files` with `--video-input video.mp4`. +* To run __on cpu__, add `MODEL.DEVICE cpu` after `--opts`. +* To save outputs to a directory (for images) or a file (for webcam or video), use `--output`. + + +### Training & Evaluation in Command Line + +We provide a script in "tools/{,plain_}train_net.py", that is made to train +all the configs provided in detectron2. +You may want to use it as a reference to write your own training script. + +To train a model with "train_net.py", first +setup the corresponding datasets following +[datasets/README.md](./datasets/README.md), +then run: +``` +cd tools/ +./train_net.py --num-gpus 8 \ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml +``` + +The configs are made for 8-GPU training. 
+To train on 1 GPU, you may need to [change some parameters](https://arxiv.org/abs/1706.02677), e.g.: +``` +./train_net.py \ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml \ + --num-gpus 1 SOLVER.IMS_PER_BATCH 2 SOLVER.BASE_LR 0.0025 +``` + +For most models, CPU training is not supported. + +To evaluate a model's performance, use +``` +./train_net.py \ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml \ + --eval-only MODEL.WEIGHTS /path/to/checkpoint_file +``` +For more options, see `./train_net.py -h`. + +### Use Detectron2 APIs in Your Code + +See our [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5) +to learn how to use detectron2 APIs to: +1. run inference with an existing model +2. train a builtin model on a custom dataset + +See [detectron2/projects](https://github.com/facebookresearch/detectron2/tree/master/projects) +for more ways to build your project on detectron2. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/index.rst b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..896e71e64139a35a566bbdd76e4b57006af35e2d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/index.rst @@ -0,0 +1,18 @@ +Tutorials +====================================== + +.. toctree:: + :maxdepth: 2 + + install + getting_started + builtin_datasets + extend + datasets + data_loading + models + write-models + training + evaluation + configs + deployment diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/install.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/install.md new file mode 100644 index 0000000000000000000000000000000000000000..3985f8ae4f5ecde26b310b4ab01c49b922f742e9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/install.md @@ -0,0 +1,184 @@ +## Installation + +Our [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5) +has step-by-step instructions that install detectron2. +The [Dockerfile](docker) +also installs detectron2 with a few simple commands. + +### Requirements +- Linux or macOS with Python ≥ 3.6 +- PyTorch ≥ 1.4 +- [torchvision](https://github.com/pytorch/vision/) that matches the PyTorch installation. + You can install them together at [pytorch.org](https://pytorch.org) to make sure of this. +- OpenCV, optional, needed by demo and visualization +- pycocotools: `pip install cython; pip install -U 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'` + + +### Build Detectron2 from Source + +gcc & g++ ≥ 5 are required. [ninja](https://ninja-build.org/) is recommended for faster build. +After having them, run: +``` +python -m pip install 'git+https://github.com/facebookresearch/detectron2.git' +# (add --user if you don't have permission) + +# Or, to install it from a local clone: +git clone https://github.com/facebookresearch/detectron2.git +python -m pip install -e detectron2 + +# Or if you are on macOS +# CC=clang CXX=clang++ python -m pip install -e . +``` + +To __rebuild__ detectron2 that's built from a local clone, use `rm -rf build/ **/*.so` to clean the +old build first. You often need to rebuild detectron2 after reinstalling PyTorch. 
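+
+After a source build (or a rebuild), a quick sanity check is to confirm that the package and its compiled extensions import cleanly. This is only a short sketch: the exact output depends on your environment, and `__version__` assumes a reasonably recent release.
+```
+python -c 'import detectron2; print(detectron2.__version__)'
+# full environment summary, also useful when reporting issues:
+python -m detectron2.utils.collect_env
+```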
+ +### Install Pre-Built Detectron2 (Linux only) +``` +# for CUDA 10.1: +python -m pip install detectron2 -f https://dl.fbaipublicfiles.com/detectron2/wheels/cu101/index.html +``` +You can replace cu101 with "cu{100,92}" or "cpu". + +Note that: +1. Such installation has to be used with certain version of official PyTorch release. + See [releases](https://github.com/facebookresearch/detectron2/releases) for requirements. + It will not work with a different version of PyTorch or a non-official build of PyTorch. +2. Such installation is out-of-date w.r.t. master branch of detectron2. It may not be + compatible with the master branch of a research project that uses detectron2 (e.g. those in + [projects](projects) or [meshrcnn](https://github.com/facebookresearch/meshrcnn/)). + +### Common Installation Issues + +If you met issues using the pre-built detectron2, please uninstall it and try building it from source. + +Click each issue for its solutions: + +
+ +Undefined torch/aten/caffe2 symbols, or segmentation fault immediately when running the library. + +
+ +This usually happens when detectron2 or torchvision is not +compiled with the version of PyTorch you're running. + +Pre-built torchvision or detectron2 has to work with the corresponding official release of pytorch. +If the error comes from a pre-built torchvision, uninstall torchvision and pytorch and reinstall them +following [pytorch.org](http://pytorch.org). So the versions will match. + +If the error comes from a pre-built detectron2, check [release notes](https://github.com/facebookresearch/detectron2/releases) +to see the corresponding pytorch version required for each pre-built detectron2. + +If the error comes from detectron2 or torchvision that you built manually from source, +remove files you built (`build/`, `**/*.so`) and rebuild it so it can pick up the version of pytorch currently in your environment. + +If you cannot resolve this problem, please include the output of `gdb -ex "r" -ex "bt" -ex "quit" --args python -m detectron2.utils.collect_env` +in your issue. +
+ +
+ +Undefined C++ symbols (e.g. `GLIBCXX`) or C++ symbols not found. + +
+Usually it's because the library is compiled with a newer C++ compiler but run with an old C++ runtime.
+
+This often happens with old anaconda.
+Try `conda update libgcc`. Then rebuild detectron2.
+
+The fundamental solution is to run the code with a proper C++ runtime.
+One way is to use `LD_PRELOAD=/path/to/libstdc++.so`.
+
+ +
+ +"Not compiled with GPU support" or "Detectron2 CUDA Compiler: not available". + +
+CUDA is not found when building detectron2. +You should make sure + +``` +python -c 'import torch; from torch.utils.cpp_extension import CUDA_HOME; print(torch.cuda.is_available(), CUDA_HOME)' +``` + +print valid outputs at the time you build detectron2. + +Most models can run inference (but not training) without GPU support. To use CPUs, set `MODEL.DEVICE='cpu'` in the config. +
+ +
+ +"invalid device function" or "no kernel image is available for execution". + +
+Two possibilities: + +* You build detectron2 with one version of CUDA but run it with a different version. + + To check whether it is the case, + use `python -m detectron2.utils.collect_env` to find out inconsistent CUDA versions. + In the output of this command, you should expect "Detectron2 CUDA Compiler", "CUDA_HOME", "PyTorch built with - CUDA" + to contain cuda libraries of the same version. + + When they are inconsistent, + you need to either install a different build of PyTorch (or build by yourself) + to match your local CUDA installation, or install a different version of CUDA to match PyTorch. + +* Detectron2 or PyTorch/torchvision is not built for the correct GPU architecture (compute compatibility). + + The GPU architecture for PyTorch/detectron2/torchvision is available in the "architecture flags" in + `python -m detectron2.utils.collect_env`. + + The GPU architecture flags of detectron2/torchvision by default matches the GPU model detected + during compilation. This means the compiled code may not work on a different GPU model. + To overwrite the GPU architecture for detectron2/torchvision, use `TORCH_CUDA_ARCH_LIST` environment variable during compilation. + + For example, `export TORCH_CUDA_ARCH_LIST=6.0,7.0` makes it compile for both P100s and V100s. + Visit [developer.nvidia.com/cuda-gpus](https://developer.nvidia.com/cuda-gpus) to find out + the correct compute compatibility number for your device. + +
+ +
+ +Undefined CUDA symbols; cannot open libcudart.so; other nvcc failures. + +
+The version of NVCC you use to build detectron2 or torchvision does +not match the version of CUDA you are running with. +This often happens when using anaconda's CUDA runtime. + +Use `python -m detectron2.utils.collect_env` to find out inconsistent CUDA versions. +In the output of this command, you should expect "Detectron2 CUDA Compiler", "CUDA_HOME", "PyTorch built with - CUDA" +to contain cuda libraries of the same version. + +When they are inconsistent, +you need to either install a different build of PyTorch (or build by yourself) +to match your local CUDA installation, or install a different version of CUDA to match PyTorch. +
+ + +
+ +"ImportError: cannot import name '_C'". + +
+Please build and install detectron2 following the instructions above. + +If you are running code from detectron2's root directory, `cd` to a different one. +Otherwise you may not import the code that you installed. +
+ +
+ +ONNX conversion segfault after some "TraceWarning". + +
+The ONNX package is compiled with too old compiler. + +Please build and install ONNX from its source code using a compiler +whose version is closer to what's used by PyTorch (available in `torch.__config__.show()`). +
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/models.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/models.md new file mode 100644 index 0000000000000000000000000000000000000000..456f36d1c03f657ba0b63eb6f26506c4b1b0d60f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/models.md @@ -0,0 +1,151 @@ +# Use Models + +Models (and their sub-models) in detectron2 are built by +functions such as `build_model`, `build_backbone`, `build_roi_heads`: +```python +from detectron2.modeling import build_model +model = build_model(cfg) # returns a torch.nn.Module +``` + +`build_model` only builds the model structure, and fill it with random parameters. +See below for how to load an existing checkpoint to the model, +and how to use the `model` object. + +### Load/Save a Checkpoint +```python +from detectron2.checkpoint import DetectionCheckpointer +DetectionCheckpointer(model).load(file_path) # load a file to model + +checkpointer = DetectionCheckpointer(model, save_dir="output") +checkpointer.save("model_999") # save to output/model_999.pth +``` + +Detectron2's checkpointer recognizes models in pytorch's `.pth` format, as well as the `.pkl` files +in our model zoo. +See [API doc](../modules/checkpoint.html#detectron2.checkpoint.DetectionCheckpointer) +for more details about its usage. + +The model files can be arbitrarily manipulated using `torch.{load,save}` for `.pth` files or +`pickle.{dump,load}` for `.pkl` files. + +### Use a Model + +A model can be called by `outputs = model(inputs)`, where `inputs` is a `list[dict]`. +Each dict corresponds to one image and the required keys +depend on the type of model, and whether the model is in training or evaluation mode. +For example, in order to do inference, +all existing models expect the "image" key, and optionally "height" and "width". +The detailed format of inputs and outputs of existing models are explained below. + +When in training mode, all models are required to be used under an `EventStorage`. +The training statistics will be put into the storage: +```python +from detectron2.utils.events import EventStorage +with EventStorage() as storage: + losses = model(inputs) +``` + +If you only want to do simple inference using an existing model, +[DefaultPredictor](../modules/engine.html#detectron2.engine.defaults.DefaultPredictor) +is a wrapper around model that provides such basic functionality. +It includes default behavior including model loading, preprocessing, +and operates on single image rather than batches. + +### Model Input Format + +Users can implement custom models that support any arbitrary input format. +Here we describe the standard input format that all builtin models support in detectron2. +They all take a `list[dict]` as the inputs. Each dict +corresponds to information about one image. + +The dict may contain the following keys: + +* "image": `Tensor` in (C, H, W) format. The meaning of channels are defined by `cfg.INPUT.FORMAT`. + Image normalization, if any, will be performed inside the model using + `cfg.MODEL.PIXEL_{MEAN,STD}`. +* "instances": an [Instances](../modules/structures.html#detectron2.structures.Instances) + object, with the following fields: + + "gt_boxes": a [Boxes](../modules/structures.html#detectron2.structures.Boxes) object storing N boxes, one for each instance. + + "gt_classes": `Tensor` of long type, a vector of N labels, in range [0, num_categories). 
+ + "gt_masks": a [PolygonMasks](../modules/structures.html#detectron2.structures.PolygonMasks) + or [BitMasks](../modules/structures.html#detectron2.structures.BitMasks) object storing N masks, one for each instance. + + "gt_keypoints": a [Keypoints](../modules/structures.html#detectron2.structures.Keypoints) + object storing N keypoint sets, one for each instance. +* "proposals": an [Instances](../modules/structures.html#detectron2.structures.Instances) + object used only in Fast R-CNN style models, with the following fields: + + "proposal_boxes": a [Boxes](../modules/structures.html#detectron2.structures.Boxes) object storing P proposal boxes. + + "objectness_logits": `Tensor`, a vector of P scores, one for each proposal. +* "height", "width": the **desired** output height and width, which is not necessarily the same + as the height or width of the `image` input field. + For example, the `image` input field might be a resized image, + but you may want the outputs to be in **original** resolution. + + If provided, the model will produce output in this resolution, + rather than in the resolution of the `image` as input into the model. This is more efficient and accurate. +* "sem_seg": `Tensor[int]` in (H, W) format. The semantic segmentation ground truth. + Values represent category labels starting from 0. + + +#### How it connects to data loader: + +The output of the default [DatasetMapper]( ../modules/data.html#detectron2.data.DatasetMapper) is a dict +that follows the above format. +After the data loader performs batching, it becomes `list[dict]` which the builtin models support. + + +### Model Output Format + +When in training mode, the builtin models output a `dict[str->ScalarTensor]` with all the losses. + +When in inference mode, the builtin models output a `list[dict]`, one dict for each image. +Based on the tasks the model is doing, each dict may contain the following fields: + +* "instances": [Instances](../modules/structures.html#detectron2.structures.Instances) + object with the following fields: + * "pred_boxes": [Boxes](../modules/structures.html#detectron2.structures.Boxes) object storing N boxes, one for each detected instance. + * "scores": `Tensor`, a vector of N scores. + * "pred_classes": `Tensor`, a vector of N labels in range [0, num_categories). + + "pred_masks": a `Tensor` of shape (N, H, W), masks for each detected instance. + + "pred_keypoints": a `Tensor` of shape (N, num_keypoint, 3). + Each row in the last dimension is (x, y, score). Scores are larger than 0. +* "sem_seg": `Tensor` of (num_categories, H, W), the semantic segmentation prediction. +* "proposals": [Instances](../modules/structures.html#detectron2.structures.Instances) + object with the following fields: + * "proposal_boxes": [Boxes](../modules/structures.html#detectron2.structures.Boxes) + object storing N boxes. + * "objectness_logits": a torch vector of N scores. +* "panoptic_seg": A tuple of `(Tensor, list[dict])`. The tensor has shape (H, W), where each element + represent the segment id of the pixel. Each dict describes one segment id and has the following fields: + * "id": the segment id + * "isthing": whether the segment is a thing or stuff + * "category_id": the category id of this segment. It represents the thing + class id when `isthing==True`, and the stuff class id otherwise. + + +### Partially execute a model: + +Sometimes you may want to obtain an intermediate tensor inside a model. 
+Since there are typically hundreds of intermediate tensors, there isn't an API that provides you +the intermediate result you need. +You have the following options: + +1. Write a (sub)model. Following the [tutorial](./write-models.md), you can + rewrite a model component (e.g. a head of a model), such that it + does the same thing as the existing component, but returns the output + you need. +2. Partially execute a model. You can create the model as usual, + but use custom code to execute it instead of its `forward()`. For example, + the following code obtains mask features before mask head. + +```python +images = ImageList.from_tensors(...) # preprocessed input tensor +model = build_model(cfg) +features = model.backbone(images.tensor) +proposals, _ = model.proposal_generator(images, features) +instances = model.roi_heads._forward_box(features, proposals) +mask_features = [features[f] for f in model.roi_heads.in_features] +mask_features = model.roi_heads.mask_pooler(mask_features, [x.pred_boxes for x in instances]) +``` + +Note that both options require you to read the existing forward code to understand +how to write code to obtain the outputs you need. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/training.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/training.md new file mode 100644 index 0000000000000000000000000000000000000000..dc7d537254c398252e3b91c25e33489aa91709c4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/training.md @@ -0,0 +1,50 @@ +# Training + +From the previous tutorials, you may now have a custom model and data loader. + +You are free to create your own optimizer, and write the training logic: it's +usually easy with PyTorch, and allow researchers to see the entire training +logic more clearly and have full control. +One such example is provided in [tools/plain_train_net.py](../../tools/plain_train_net.py). + +We also provide a standarized "trainer" abstraction with a +[minimal hook system](../modules/engine.html#detectron2.engine.HookBase) +that helps simplify the standard types of training. + +You can use +[SimpleTrainer().train()](../modules/engine.html#detectron2.engine.SimpleTrainer) +which provides minimal abstraction for single-cost single-optimizer single-data-source training. +The builtin `train_net.py` script uses +[DefaultTrainer().train()](../modules/engine.html#detectron2.engine.defaults.DefaultTrainer), +which includes more standard default behavior that one might want to opt in, +including default configurations for learning rate schedule, +logging, evaluation, checkpointing etc. +This also means that it's less likely to support some non-standard behavior +you might want during research. + +To customize the training loops, you can: + +1. If your customization is similar to what `DefaultTrainer` is already doing, +you can change behavior of `DefaultTrainer` by overwriting [its methods](../modules/engine.html#detectron2.engine.defaults.DefaultTrainer) +in a subclass, like what [tools/train_net.py](../../tools/train_net.py) does. +2. If you need something very novel, you can start from [tools/plain_train_net.py](../../tools/plain_train_net.py) to implement them yourself. + +### Logging of Metrics + +During training, metrics are saved to a centralized [EventStorage](../modules/utils.html#detectron2.utils.events.EventStorage). 
+You can use the following code to access it and log metrics to it: +``` +from detectron2.utils.events import get_event_storage + +# inside the model: +if self.training: + value = # compute the value from inputs + storage = get_event_storage() + storage.put_scalar("some_accuracy", value) +``` + +Refer to its documentation for more details. + +Metrics are then saved to various destinations with [EventWriter](../modules/utils.html#module-detectron2.utils.events). +DefaultTrainer enables a few `EventWriter` with default configurations. +See above for how to customize them. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/write-models.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/write-models.md new file mode 100644 index 0000000000000000000000000000000000000000..bb87d586d609ca94240f32f2eaab7eadb0d07b93 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/docs/tutorials/write-models.md @@ -0,0 +1,39 @@ +# Write Models + +If you are trying to do something completely new, you may wish to implement +a model entirely from scratch within detectron2. However, in many situations you may +be interested in modifying or extending some components of an existing model. +Therefore, we also provide a registration mechanism that lets you override the +behavior of certain internal components of standard models. + +For example, to add a new backbone, import this code in your code: +```python +from detectron2.modeling import BACKBONE_REGISTRY, Backbone, ShapeSpec + +@BACKBONE_REGISTRY.register() +class ToyBackBone(Backbone): + def __init__(self, cfg, input_shape): + # create your own backbone + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=16, padding=3) + + def forward(self, image): + return {"conv1": self.conv1(image)} + + def output_shape(self): + return {"conv1": ShapeSpec(channels=64, stride=16)} +``` +Then, you can use `cfg.MODEL.BACKBONE.NAME = 'ToyBackBone'` in your config object. +`build_model(cfg)` will then call your `ToyBackBone` instead. + +As another example, to add new abilities to the ROI heads in the Generalized R-CNN meta-architecture, +you can implement a new +[ROIHeads](../modules/modeling.html#detectron2.modeling.ROIHeads) subclass and put it in the `ROI_HEADS_REGISTRY`. +See [densepose in detectron2](../../projects/DensePose) +and [meshrcnn](https://github.com/facebookresearch/meshrcnn) +for examples that implement new ROIHeads to perform new tasks. +And [projects/](../../projects/) +contains more examples that implement different architectures. + +A complete list of registries can be found in [API documentation](../modules/modeling.html#model-registries). +You can register components in these registries to customize different parts of a model, or the +entire model. 
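+
+For instance, here is a minimal sketch of registering a custom ROI heads component. The class name `MyROIHeads` is hypothetical; it simply defers to the standard implementation, and you would override only the methods you need:
+```python
+from detectron2.config import get_cfg
+from detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads
+
+@ROI_HEADS_REGISTRY.register()
+class MyROIHeads(StandardROIHeads):
+    # behaves exactly like StandardROIHeads until a method is overridden
+    def forward(self, images, features, proposals, targets=None):
+        return super().forward(images, features, proposals, targets)
+
+cfg = get_cfg()
+cfg.MODEL.ROI_HEADS.NAME = "MyROIHeads"  # build_model(cfg) will now build MyROIHeads
+```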
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/README.md new file mode 100644 index 0000000000000000000000000000000000000000..fd2f1ee3382365ab53ae44471c90266dff42d883 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/README.md @@ -0,0 +1,54 @@ +# DensePose in Detectron2 +**Dense Human Pose Estimation In The Wild** + +_Rıza Alp Güler, Natalia Neverova, Iasonas Kokkinos_ + +[[`densepose.org`](https://densepose.org)] [[`arXiv`](https://arxiv.org/abs/1802.00434)] [[`BibTeX`](#CitingDensePose)] + +Dense human pose estimation aims at mapping all human pixels of an RGB image to the 3D surface of the human body. + +
+ +
+ +In this repository, we provide the code to train and evaluate DensePose-RCNN. We also provide tools to visualize +DensePose annotation and results. + +# Quick Start + +See [ Getting Started ](doc/GETTING_STARTED.md) + +# Model Zoo and Baselines + +We provide a number of baseline results and trained models available for download. See [Model Zoo](doc/MODEL_ZOO.md) for details. + +# License + +Detectron2 is released under the [Apache 2.0 license](../../LICENSE) + +## Citing DensePose + +If you use DensePose, please take the references from the following BibTeX entries: + +For DensePose with estimated confidences: + +``` +@InProceedings{Neverova2019DensePoseConfidences, + title = {Correlated Uncertainty for Learning Dense Correspondences from Noisy Labels}, + author = {Neverova, Natalia and Novotny, David and Vedaldi, Andrea}, + journal = {Advances in Neural Information Processing Systems}, + year = {2019}, +} +``` + +For the original DensePose: + +``` +@InProceedings{Guler2018DensePose, + title={DensePose: Dense Human Pose Estimation In The Wild}, + author={R\{i}za Alp G\"uler, Natalia Neverova, Iasonas Kokkinos}, + journal={The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2018} +} +``` + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/apply_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/apply_net.py new file mode 100755 index 0000000000000000000000000000000000000000..7262f7c059b42225b809429654d34f29dbd2801f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/apply_net.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import argparse +import glob +import logging +import os +import pickle +import sys +from typing import Any, ClassVar, Dict, List +import torch + +from detectron2.config import get_cfg +from detectron2.data.detection_utils import read_image +from detectron2.engine.defaults import DefaultPredictor +from detectron2.structures.boxes import BoxMode +from detectron2.structures.instances import Instances +from detectron2.utils.logger import setup_logger + +from densepose import add_densepose_config +from densepose.utils.logger import verbosity_to_level +from densepose.vis.base import CompoundVisualizer +from densepose.vis.bounding_box import ScoredBoundingBoxVisualizer +from densepose.vis.densepose import ( + DensePoseResultsContourVisualizer, + DensePoseResultsFineSegmentationVisualizer, + DensePoseResultsUVisualizer, + DensePoseResultsVVisualizer, +) +from densepose.vis.extractor import CompoundExtractor, create_extractor + +DOC = """Apply Net - a tool to print / visualize DensePose results +""" + +LOGGER_NAME = "apply_net" +logger = logging.getLogger(LOGGER_NAME) + +_ACTION_REGISTRY: Dict[str, "Action"] = {} + + +class Action(object): + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + parser.add_argument( + "-v", + "--verbosity", + action="count", + help="Verbose mode. 
Multiple -v options increase the verbosity.", + ) + + +def register_action(cls: type): + """ + Decorator for action classes to automate action registration + """ + global _ACTION_REGISTRY + _ACTION_REGISTRY[cls.COMMAND] = cls + return cls + + +class InferenceAction(Action): + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + super(InferenceAction, cls).add_arguments(parser) + parser.add_argument("cfg", metavar="", help="Config file") + parser.add_argument("model", metavar="", help="Model file") + parser.add_argument("input", metavar="", help="Input data") + parser.add_argument( + "--opts", + help="Modify config options using the command-line 'KEY VALUE' pairs", + default=[], + nargs=argparse.REMAINDER, + ) + + @classmethod + def execute(cls: type, args: argparse.Namespace): + logger.info(f"Loading config from {args.cfg}") + opts = [] + cfg = cls.setup_config(args.cfg, args.model, args, opts) + logger.info(f"Loading model from {args.model}") + predictor = DefaultPredictor(cfg) + logger.info(f"Loading data from {args.input}") + file_list = cls._get_input_file_list(args.input) + if len(file_list) == 0: + logger.warning(f"No input images for {args.input}") + return + context = cls.create_context(args) + for file_name in file_list: + img = read_image(file_name, format="BGR") # predictor expects BGR image. + with torch.no_grad(): + outputs = predictor(img)["instances"] + cls.execute_on_outputs(context, {"file_name": file_name, "image": img}, outputs) + cls.postexecute(context) + + @classmethod + def setup_config( + cls: type, config_fpath: str, model_fpath: str, args: argparse.Namespace, opts: List[str] + ): + cfg = get_cfg() + add_densepose_config(cfg) + cfg.merge_from_file(config_fpath) + cfg.merge_from_list(args.opts) + if opts: + cfg.merge_from_list(opts) + cfg.MODEL.WEIGHTS = model_fpath + cfg.freeze() + return cfg + + @classmethod + def _get_input_file_list(cls: type, input_spec: str): + if os.path.isdir(input_spec): + file_list = [ + os.path.join(input_spec, fname) + for fname in os.listdir(input_spec) + if os.path.isfile(os.path.join(input_spec, fname)) + ] + elif os.path.isfile(input_spec): + file_list = [input_spec] + else: + file_list = glob.glob(input_spec) + return file_list + + +@register_action +class DumpAction(InferenceAction): + """ + Dump action that outputs results to a pickle file + """ + + COMMAND: ClassVar[str] = "dump" + + @classmethod + def add_parser(cls: type, subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser(cls.COMMAND, help="Dump model outputs to a file.") + cls.add_arguments(parser) + parser.set_defaults(func=cls.execute) + + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + super(DumpAction, cls).add_arguments(parser) + parser.add_argument( + "--output", + metavar="", + default="results.pkl", + help="File name to save dump to", + ) + + @classmethod + def execute_on_outputs( + cls: type, context: Dict[str, Any], entry: Dict[str, Any], outputs: Instances + ): + image_fpath = entry["file_name"] + logger.info(f"Processing {image_fpath}") + result = {"file_name": image_fpath} + if outputs.has("scores"): + result["scores"] = outputs.get("scores").cpu() + if outputs.has("pred_boxes"): + result["pred_boxes_XYXY"] = outputs.get("pred_boxes").tensor.cpu() + if outputs.has("pred_densepose"): + boxes_XYWH = BoxMode.convert( + result["pred_boxes_XYXY"], BoxMode.XYXY_ABS, BoxMode.XYWH_ABS + ) + result["pred_densepose"] = outputs.get("pred_densepose").to_result(boxes_XYWH) + 
context["results"].append(result) + + @classmethod + def create_context(cls: type, args: argparse.Namespace): + context = {"results": [], "out_fname": args.output} + return context + + @classmethod + def postexecute(cls: type, context: Dict[str, Any]): + out_fname = context["out_fname"] + out_dir = os.path.dirname(out_fname) + if len(out_dir) > 0 and not os.path.exists(out_dir): + os.makedirs(out_dir) + with open(out_fname, "wb") as hFile: + pickle.dump(context["results"], hFile) + logger.info(f"Output saved to {out_fname}") + + +@register_action +class ShowAction(InferenceAction): + """ + Show action that visualizes selected entries on an image + """ + + COMMAND: ClassVar[str] = "show" + VISUALIZERS: ClassVar[Dict[str, object]] = { + "dp_contour": DensePoseResultsContourVisualizer, + "dp_segm": DensePoseResultsFineSegmentationVisualizer, + "dp_u": DensePoseResultsUVisualizer, + "dp_v": DensePoseResultsVVisualizer, + "bbox": ScoredBoundingBoxVisualizer, + } + + @classmethod + def add_parser(cls: type, subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser(cls.COMMAND, help="Visualize selected entries") + cls.add_arguments(parser) + parser.set_defaults(func=cls.execute) + + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + super(ShowAction, cls).add_arguments(parser) + parser.add_argument( + "visualizations", + metavar="", + help="Comma separated list of visualizations, possible values: " + "[{}]".format(",".join(sorted(cls.VISUALIZERS.keys()))), + ) + parser.add_argument( + "--min_score", + metavar="", + default=0.8, + type=float, + help="Minimum detection score to visualize", + ) + parser.add_argument( + "--nms_thresh", metavar="", default=None, type=float, help="NMS threshold" + ) + parser.add_argument( + "--output", + metavar="", + default="outputres.png", + help="File name to save output to", + ) + + @classmethod + def setup_config( + cls: type, config_fpath: str, model_fpath: str, args: argparse.Namespace, opts: List[str] + ): + opts.append("MODEL.ROI_HEADS.SCORE_THRESH_TEST") + opts.append(str(args.min_score)) + if args.nms_thresh is not None: + opts.append("MODEL.ROI_HEADS.NMS_THRESH_TEST") + opts.append(str(args.nms_thresh)) + cfg = super(ShowAction, cls).setup_config(config_fpath, model_fpath, args, opts) + return cfg + + @classmethod + def execute_on_outputs( + cls: type, context: Dict[str, Any], entry: Dict[str, Any], outputs: Instances + ): + import cv2 + import numpy as np + + visualizer = context["visualizer"] + extractor = context["extractor"] + image_fpath = entry["file_name"] + logger.info(f"Processing {image_fpath}") + image = cv2.cvtColor(entry["image"], cv2.COLOR_BGR2GRAY) + image = np.tile(image[:, :, np.newaxis], [1, 1, 3]) + data = extractor(outputs) + image_vis = visualizer.visualize(image, data) + entry_idx = context["entry_idx"] + 1 + out_fname = cls._get_out_fname(entry_idx, context["out_fname"]) + out_dir = os.path.dirname(out_fname) + if len(out_dir) > 0 and not os.path.exists(out_dir): + os.makedirs(out_dir) + cv2.imwrite(out_fname, image_vis) + logger.info(f"Output saved to {out_fname}") + context["entry_idx"] += 1 + + @classmethod + def postexecute(cls: type, context: Dict[str, Any]): + pass + + @classmethod + def _get_out_fname(cls: type, entry_idx: int, fname_base: str): + base, ext = os.path.splitext(fname_base) + return base + ".{0:04d}".format(entry_idx) + ext + + @classmethod + def create_context(cls: type, args: argparse.Namespace) -> Dict[str, Any]: + vis_specs = args.visualizations.split(",") + 
visualizers = [] + extractors = [] + for vis_spec in vis_specs: + vis = cls.VISUALIZERS[vis_spec]() + visualizers.append(vis) + extractor = create_extractor(vis) + extractors.append(extractor) + visualizer = CompoundVisualizer(visualizers) + extractor = CompoundExtractor(extractors) + context = { + "extractor": extractor, + "visualizer": visualizer, + "out_fname": args.output, + "entry_idx": 0, + } + return context + + +def create_argument_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description=DOC, + formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=120), + ) + parser.set_defaults(func=lambda _: parser.print_help(sys.stdout)) + subparsers = parser.add_subparsers(title="Actions") + for _, action in _ACTION_REGISTRY.items(): + action.add_parser(subparsers) + return parser + + +def main(): + parser = create_argument_parser() + args = parser.parse_args() + verbosity = args.verbosity if hasattr(args, "verbosity") else None + global logger + logger = setup_logger(name=LOGGER_NAME) + logger.setLevel(verbosity_to_level(verbosity)) + args.func(args) + + +if __name__ == "__main__": + main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/Base-DensePose-RCNN-FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/Base-DensePose-RCNN-FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3ed1bcd68744a22472cc8b391993e4175013dc42 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/Base-DensePose-RCNN-FPN.yaml @@ -0,0 +1,47 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + BACKBONE: + NAME: "build_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res2", "res3", "res4", "res5"] + FPN: + IN_FEATURES: ["res2", "res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: [[32], [64], [128], [256], [512]] # One size for each in feature map + ASPECT_RATIOS: [[0.5, 1.0, 2.0]] # Three aspect ratios (same for all in feature maps) + RPN: + IN_FEATURES: ["p2", "p3", "p4", "p5", "p6"] + PRE_NMS_TOPK_TRAIN: 2000 # Per FPN level + PRE_NMS_TOPK_TEST: 1000 # Per FPN level + # Detectron1 uses 2000 proposals per-batch, + # (See "modeling/rpn/rpn_outputs.py" for details of this legacy issue) + # which is approximately 1000 proposals per-image since the default batch size for FPN is 2. 
+ POST_NMS_TOPK_TRAIN: 1000 + POST_NMS_TOPK_TEST: 1000 + + DENSEPOSE_ON: True + ROI_HEADS: + NAME: "DensePoseROIHeads" + IN_FEATURES: ["p2", "p3", "p4", "p5"] + NUM_CLASSES: 1 + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseV1ConvXHead" + POOLER_TYPE: "ROIAlign" + NUM_COARSE_SEGM_CHANNELS: 2 +DATASETS: + TRAIN: ("densepose_coco_2014_train", "densepose_coco_2014_valminusminival") + TEST: ("densepose_coco_2014_minival",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.01 + STEPS: (60000, 80000) + MAX_ITER: 90000 + WARMUP_FACTOR: 0.1 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_WC1_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_WC1_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..15475b1ac3bb7272a7ebc0061a55119ffd2591b9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_WC1_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseDeepLabHead" + UV_CONFIDENCE: + ENABLED: True + TYPE: "iid_iso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_WC2_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_WC2_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7546b967ab89129c9a276f19b1cf2d6b59f1a462 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_WC2_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseDeepLabHead" + UV_CONFIDENCE: + ENABLED: True + TYPE: "indep_aniso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..045f7f02f1b4eb0c0ef1733c3ac65e3aa70168de --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_DL_s1x.yaml @@ -0,0 +1,10 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseDeepLabHead" +SOLVER: + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_WC1_s1x.yaml 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_WC1_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ace62094fbc4ce2024810333c11c7a955d8eeb22 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_WC1_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 + ROI_DENSEPOSE_HEAD: + UV_CONFIDENCE: + ENABLED: True + TYPE: "iid_iso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) + WARMUP_FACTOR: 0.025 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_WC2_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_WC2_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..766c098f6dcdd1fb3f67957d7d1d982b37747b96 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_WC2_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 + ROI_DENSEPOSE_HEAD: + UV_CONFIDENCE: + ENABLED: True + TYPE: "indep_aniso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) + WARMUP_FACTOR: 0.025 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..af44fb767edf9bf093463e62f93e070d0d019c5a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_s1x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 +SOLVER: + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_s1x_legacy.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_s1x_legacy.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8e79a1b9549cf19ed4a43cf9caf3dc88f6133310 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_101_FPN_s1x_legacy.yaml @@ -0,0 +1,17 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 + ROI_DENSEPOSE_HEAD: + NUM_COARSE_SEGM_CHANNELS: 15 + POOLER_RESOLUTION: 14 + HEATMAP_SIZE: 56 + INDEX_WEIGHTS: 2.0 + PART_WEIGHTS: 0.3 + POINT_REGRESSION_WEIGHTS: 0.1 + DECODER_ON: False +SOLVER: + BASE_LR: 0.002 + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_WC1_s1x.yaml 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_WC1_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f3720eff56ce042a68da6c99f484b963cae2c7d9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_WC1_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseDeepLabHead" + UV_CONFIDENCE: + ENABLED: True + TYPE: "iid_iso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_WC2_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_WC2_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5a47cc05e6e9dc882778c6b502d93cbcec88fb88 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_WC2_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseDeepLabHead" + UV_CONFIDENCE: + ENABLED: True + TYPE: "indep_aniso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..52a170b4a28289ad943314f77256e34800d23121 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_DL_s1x.yaml @@ -0,0 +1,10 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseDeepLabHead" +SOLVER: + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_WC1_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_WC1_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d36e54256ac22f1b01604e54430da24972f06eeb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_WC1_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + UV_CONFIDENCE: + ENABLED: True + TYPE: "iid_iso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) + WARMUP_FACTOR: 0.025 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_WC2_s1x.yaml 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_WC2_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e880d469564a3757ba3f4d708054074cefda49b6 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_WC2_s1x.yaml @@ -0,0 +1,16 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + UV_CONFIDENCE: + ENABLED: True + TYPE: "indep_aniso" + POINT_REGRESSION_WEIGHTS: 0.0005 +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 130000 + STEPS: (100000, 120000) + WARMUP_FACTOR: 0.025 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_s1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_s1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d2dd14c6f92f3850b99e6f1c828c0fcee52120e1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_s1x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_s1x_legacy.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_s1x_legacy.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6c5391f3b3c3d437312a290d29b0656cb3804b25 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/densepose_rcnn_R_50_FPN_s1x_legacy.yaml @@ -0,0 +1,17 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + NUM_COARSE_SEGM_CHANNELS: 15 + POOLER_RESOLUTION: 14 + HEATMAP_SIZE: 56 + INDEX_WEIGHTS: 2.0 + PART_WEIGHTS: 0.3 + POINT_REGRESSION_WEIGHTS: 0.1 + DECODER_ON: False +SOLVER: + BASE_LR: 0.002 + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/evolution/Base-RCNN-FPN-MC.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/evolution/Base-RCNN-FPN-MC.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5a20882a9fd275bac3e3cf49c128684c73085ca1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/evolution/Base-RCNN-FPN-MC.yaml @@ -0,0 +1,91 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + BACKBONE: + NAME: "build_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res2", "res3", "res4", "res5"] + FPN: + IN_FEATURES: ["res2", "res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: [[32], [64], [128], [256], [512]] # One size for each in feature map + ASPECT_RATIOS: [[0.5, 1.0, 2.0]] # Three aspect ratios (same for all in feature maps) + RPN: + IN_FEATURES: ["p2", "p3", "p4", "p5", "p6"] + PRE_NMS_TOPK_TRAIN: 2000 # Per FPN level + PRE_NMS_TOPK_TEST: 1000 # Per FPN level + # Detectron1 uses 2000 proposals per-batch, + # (See 
"modeling/rpn/rpn_outputs.py" for details of this legacy issue) + # which is approximately 1000 proposals per-image since the default batch size for FPN is 2. + POST_NMS_TOPK_TRAIN: 1000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "StandardROIHeads" + IN_FEATURES: ["p2", "p3", "p4", "p5"] + NUM_CLASSES: 1 + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + ROI_MASK_HEAD: + NAME: "MaskRCNNConvUpsampleHead" + NUM_CONV: 4 + POOLER_RESOLUTION: 14 +DATASETS: + TRAIN: ("base_coco_2017_train",) + TEST: ("base_coco_2017_val", "densepose_chimps") + CATEGORY_MAPS: + "base_coco_2017_train": + "16": 1 # bird -> person + "17": 1 # cat -> person + "18": 1 # dog -> person + "19": 1 # horse -> person + "20": 1 # sheep -> person + "21": 1 # cow -> person + "22": 1 # elephant -> person + "23": 1 # bear -> person + "24": 1 # zebra -> person + "25": 1 # girafe -> person + "base_coco_2017_val": + "16": 1 # bird -> person + "17": 1 # cat -> person + "18": 1 # dog -> person + "19": 1 # horse -> person + "20": 1 # sheep -> person + "21": 1 # cow -> person + "22": 1 # elephant -> person + "23": 1 # bear -> person + "24": 1 # zebra -> person + "25": 1 # girafe -> person + WHITELISTED_CATEGORIES: + "base_coco_2017_train": + - 1 # person + - 16 # bird + - 17 # cat + - 18 # dog + - 19 # horse + - 20 # sheep + - 21 # cow + - 22 # elephant + - 23 # bear + - 24 # zebra + - 25 # girafe + "base_coco_2017_val": + - 1 # person + - 16 # bird + - 17 # cat + - 18 # dog + - 19 # horse + - 20 # sheep + - 21 # cow + - 22 # elephant + - 23 # bear + - 24 # zebra + - 25 # girafe +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/evolution/faster_rcnn_R_50_FPN_1x_MC.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/evolution/faster_rcnn_R_50_FPN_1x_MC.yaml new file mode 100644 index 0000000000000000000000000000000000000000..80139ad9e40c09fdd862cdac80aa18c5cabc0a1e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/evolution/faster_rcnn_R_50_FPN_1x_MC.yaml @@ -0,0 +1,7 @@ +_BASE_: "Base-RCNN-FPN-MC.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + DENSEPOSE_ON: False + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_DL_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_DL_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b90989eef81e27d23119d2cd4627e8cea211ac51 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_DL_instant_test.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseDeepLabHead" +DATASETS: + TRAIN: ("densepose_coco_2014_minival_100",) + TEST: ("densepose_coco_2014_minival_100",) +SOLVER: + MAX_ITER: 40 + STEPS: (30,) diff --git 
a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_TTA_inference_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_TTA_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7d412740340d924bacc3baa57f32bfea0b871511 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_TTA_inference_acc_test.yaml @@ -0,0 +1,13 @@ +_BASE_: "../densepose_rcnn_R_50_FPN_s1x.yaml" +MODEL: + WEIGHTS: "https://dl.fbaipublicfiles.com/densepose/densepose_rcnn_R_50_FPN_s1x/165712039/model_final_162be9.pkl" +DATASETS: + TRAIN: () + TEST: ("densepose_coco_2014_minival_100",) +TEST: + AUG: + ENABLED: True + MIN_SIZES: (400, 500, 600, 700, 800, 900, 1000, 1100, 1200) + MAX_SIZE: 4000 + FLIP: True + EXPECTED_RESULTS: [["bbox_TTA", "AP", 61.74, 0.03], ["densepose_gps_TTA", "AP", 60.22, 0.03], ["densepose_gpsm_TTA", "AP", 63.85, 0.03]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_WC1_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_WC1_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f0fe61151adf255baba717f3e65ff6fab52829a6 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_WC1_instant_test.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + UV_CONFIDENCE: + ENABLED: True + TYPE: "iid_iso" + POINT_REGRESSION_WEIGHTS: 0.0005 +DATASETS: + TRAIN: ("densepose_coco_2014_minival_100",) + TEST: ("densepose_coco_2014_minival_100",) +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 40 + STEPS: (30,) + WARMUP_FACTOR: 0.025 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_WC2_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_WC2_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f0d9358c8846452314697a19b5e2ea9e075ddaeb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_WC2_instant_test.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 + ROI_DENSEPOSE_HEAD: + UV_CONFIDENCE: + ENABLED: True + TYPE: "indep_aniso" + POINT_REGRESSION_WEIGHTS: 0.0005 +DATASETS: + TRAIN: ("densepose_coco_2014_minival_100",) + TEST: ("densepose_coco_2014_minival_100",) +SOLVER: + CLIP_GRADIENTS: + ENABLED: True + MAX_ITER: 40 + STEPS: (30,) + WARMUP_FACTOR: 0.025 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_inference_acc_test.yaml 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3c5a7d20989e774cbba2b443e3026a2361201d0f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,8 @@ +_BASE_: "../densepose_rcnn_R_50_FPN_s1x.yaml" +MODEL: + WEIGHTS: "https://dl.fbaipublicfiles.com/densepose/densepose_rcnn_R_50_FPN_s1x/165712039/model_final_162be9.pkl" +DATASETS: + TRAIN: () + TEST: ("densepose_coco_2014_minival_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 59.27, 0.025], ["densepose_gps", "AP", 60.11, 0.02], ["densepose_gpsm", "AP", 64.20, 0.02]] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_instant_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..057c8768186e8a818228aa2f028ba3007374c571 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" +DATASETS: + TRAIN: ("densepose_coco_2014_minival_100",) + TEST: ("densepose_coco_2014_minival_100",) +SOLVER: + MAX_ITER: 40 + STEPS: (30,) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_training_acc_test.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_training_acc_test.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b991160c79e5a95feac22be30deea10d200178d4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/configs/quick_schedules/densepose_rcnn_R_50_FPN_training_acc_test.yaml @@ -0,0 +1,14 @@ +_BASE_: "../Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + ROI_HEADS: + NUM_CLASSES: 1 +DATASETS: + TRAIN: ("densepose_coco_2014_minival",) + TEST: ("densepose_coco_2014_minival",) +SOLVER: + MAX_ITER: 6000 + STEPS: (5500, 5800) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 58.27, 1.0], ["densepose_gps", "AP", 42.47, 1.5], ["densepose_gpsm", "AP", 49.20, 1.5]] + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..aea5a1a9c3e63ce168a41545322599ccc4adbbb8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from .data.datasets import builtin # just to register data +from .config import add_densepose_config, add_dataset_category_config +from .densepose_head import ROI_DENSEPOSE_HEAD_REGISTRY +from .evaluator import DensePoseCOCOEvaluator +from .roi_head import DensePoseROIHeads +from .data.structures import DensePoseDataRelative, DensePoseList, DensePoseTransformData +from .modeling.test_time_augmentation import DensePoseGeneralizedRCNNWithTTA +from .utils.transform import load_from_cfg diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/config.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/config.py new file mode 100644 index 0000000000000000000000000000000000000000..2d76056b362beb7c0832e775b9e3415dd42767a5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/config.py @@ -0,0 +1,68 @@ +# -*- coding = utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from detectron2.config import CfgNode as CN + + +def add_dataset_category_config(cfg: CN): + """ + Add config for additional category-related dataset options + - category whitelisting + - category mapping + """ + _C = cfg + _C.DATASETS.CATEGORY_MAPS = CN(new_allowed=True) + _C.DATASETS.WHITELISTED_CATEGORIES = CN(new_allowed=True) + + +def add_densepose_config(cfg: CN): + """ + Add config for densepose head. + """ + _C = cfg + + _C.MODEL.DENSEPOSE_ON = True + + _C.MODEL.ROI_DENSEPOSE_HEAD = CN() + _C.MODEL.ROI_DENSEPOSE_HEAD.NAME = "" + _C.MODEL.ROI_DENSEPOSE_HEAD.NUM_STACKED_CONVS = 8 + # Number of parts used for point labels + _C.MODEL.ROI_DENSEPOSE_HEAD.NUM_PATCHES = 24 + _C.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL = 4 + _C.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_DIM = 512 + _C.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_KERNEL = 3 + _C.MODEL.ROI_DENSEPOSE_HEAD.UP_SCALE = 2 + _C.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE = 112 + _C.MODEL.ROI_DENSEPOSE_HEAD.POOLER_TYPE = "ROIAlignV2" + _C.MODEL.ROI_DENSEPOSE_HEAD.POOLER_RESOLUTION = 28 + _C.MODEL.ROI_DENSEPOSE_HEAD.POOLER_SAMPLING_RATIO = 2 + _C.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS = 2 # 15 or 2 + # Overlap threshold for an RoI to be considered foreground (if >= FG_IOU_THRESHOLD) + _C.MODEL.ROI_DENSEPOSE_HEAD.FG_IOU_THRESHOLD = 0.7 + # Loss weights for annotation masks.(14 Parts) + _C.MODEL.ROI_DENSEPOSE_HEAD.INDEX_WEIGHTS = 5.0 + # Loss weights for surface parts. (24 Parts) + _C.MODEL.ROI_DENSEPOSE_HEAD.PART_WEIGHTS = 1.0 + # Loss weights for UV regression. 
+ _C.MODEL.ROI_DENSEPOSE_HEAD.POINT_REGRESSION_WEIGHTS = 0.01 + # For Decoder + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_ON = True + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NUM_CLASSES = 256 + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_CONV_DIMS = 256 + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NORM = "" + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_COMMON_STRIDE = 4 + # For DeepLab head + _C.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB = CN() + _C.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NORM = "GN" + _C.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NONLOCAL_ON = 0 + # Confidences + # Enable learning confidences (variances) along with the actual values + _C.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE = CN({"ENABLED": False}) + # UV confidence lower bound + _C.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.EPSILON = 0.01 + # Statistical model type for confidence learning, possible values: + # - "iid_iso": statistically independent identically distributed residuals + # with isotropic covariance + # - "indep_aniso": statistically independent residuals with anisotropic + # covariances + _C.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.TYPE = "iid_iso" diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5484f59dc6aa8b1d54dd6771c1e4c490fad7e20e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from .build import build_detection_test_loader, build_detection_train_loader +from .dataset_mapper import DatasetMapper + +# ensure the builtin data are registered +from . import datasets + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/build.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/build.py new file mode 100644 index 0000000000000000000000000000000000000000..c722ec12ffacf26ee0babe766b023566b2e79543 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/build.py @@ -0,0 +1,405 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import itertools +import logging +import numpy as np +import operator +from typing import Any, Callable, Collection, Dict, Iterable, List, Optional +import torch + +from detectron2.config import CfgNode +from detectron2.data import samplers +from detectron2.data.build import ( + load_proposals_into_dataset, + print_instances_class_histogram, + trivial_batch_collator, + worker_init_reset_seed, +) +from detectron2.data.catalog import DatasetCatalog, MetadataCatalog +from detectron2.data.common import AspectRatioGroupedDataset, DatasetFromList, MapDataset +from detectron2.utils.comm import get_world_size + +from .dataset_mapper import DatasetMapper +from .datasets.coco import DENSEPOSE_KEYS_WITHOUT_MASK as DENSEPOSE_COCO_KEYS_WITHOUT_MASK +from .datasets.coco import DENSEPOSE_MASK_KEY as DENSEPOSE_COCO_MASK_KEY + +__all__ = ["build_detection_train_loader", "build_detection_test_loader"] + + +Instance = Dict[str, Any] +InstancePredicate = Callable[[Instance], bool] + + +def _compute_num_images_per_worker(cfg: CfgNode): + num_workers = get_world_size() + images_per_batch = cfg.SOLVER.IMS_PER_BATCH + assert ( + images_per_batch % num_workers == 0 + ), "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of workers ({}).".format( + images_per_batch, num_workers + ) + assert ( + images_per_batch >= num_workers + ), "SOLVER.IMS_PER_BATCH ({}) must be larger than the number of workers ({}).".format( + images_per_batch, num_workers + ) + images_per_worker = images_per_batch // num_workers + return images_per_worker + + +def _map_category_id_to_contiguous_id(dataset_name: str, dataset_dicts: Iterable[Instance]): + meta = MetadataCatalog.get(dataset_name) + for dataset_dict in dataset_dicts: + for ann in dataset_dict["annotations"]: + ann["category_id"] = meta.thing_dataset_id_to_contiguous_id[ann["category_id"]] + + +def _add_category_id_to_contiguous_id_maps_to_metadata(dataset_names: Iterable[str]): + # merge categories for all data + merged_categories = {} + for dataset_name in dataset_names: + meta = MetadataCatalog.get(dataset_name) + for cat_id, cat_name in meta.categories.items(): + if cat_id not in merged_categories: + merged_categories[cat_id] = (cat_name, dataset_name) + continue + cat_name_other, dataset_name_other = merged_categories[cat_id] + if cat_name_other != cat_name: + raise ValueError( + f"Incompatible categories for category ID {cat_id}: " + f'dataset {dataset_name} value "{cat_name}", ' + f'dataset {dataset_name_other} value "{cat_name_other}"' + ) + + merged_cat_id_to_cont_id = {} + for i, cat_id in enumerate(sorted(merged_categories.keys())): + merged_cat_id_to_cont_id[cat_id] = i + + # add category maps to metadata + for dataset_name in dataset_names: + meta = MetadataCatalog.get(dataset_name) + categories = meta.get("categories") + meta.thing_classes = [categories[cat_id] for cat_id in sorted(categories.keys())] + meta.thing_dataset_id_to_contiguous_id = { + cat_id: merged_cat_id_to_cont_id[cat_id] for cat_id in sorted(categories.keys()) + } + meta.thing_contiguous_id_to_dataset_id = { + merged_cat_id_to_cont_id[cat_id]: cat_id for cat_id in sorted(categories.keys()) + } + + +def _maybe_create_general_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + def has_annotations(instance: Instance) -> bool: + return "annotations" in instance + + def has_only_crowd_anotations(instance: Instance) -> bool: + for ann in instance["annotations"]: + if ann.get("is_crowd", 0) == 0: + return False + return True + + def 
general_keep_instance_predicate(instance: Instance) -> bool: + return has_annotations(instance) and not has_only_crowd_anotations(instance) + + if not cfg.DATALOADER.FILTER_EMPTY_ANNOTATIONS: + return None + return general_keep_instance_predicate + + +def _maybe_create_keypoints_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + + min_num_keypoints = cfg.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE + + def has_sufficient_num_keypoints(instance: Instance) -> bool: + num_kpts = sum( + (np.array(ann["keypoints"][2::3]) > 0).sum() + for ann in instance["annotations"] + if "keypoints" in ann + ) + return num_kpts >= min_num_keypoints + + if cfg.MODEL.KEYPOINT_ON and (min_num_keypoints > 0): + return has_sufficient_num_keypoints + return None + + +def _maybe_create_mask_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + if not cfg.MODEL.MASK_ON: + return None + + def has_mask_annotations(instance: Instance) -> bool: + return any("segmentation" in ann for ann in instance["annotations"]) + + return has_mask_annotations + + +def _maybe_create_densepose_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + if not cfg.MODEL.DENSEPOSE_ON: + return None + + def has_densepose_annotations(instance: Instance) -> bool: + for ann in instance["annotations"]: + if all(key in ann for key in DENSEPOSE_COCO_KEYS_WITHOUT_MASK) and ( + (DENSEPOSE_COCO_MASK_KEY in ann) or ("segmentation" in ann) + ): + return True + return False + + return has_densepose_annotations + + +def _maybe_create_specific_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + specific_predicate_creators = [ + _maybe_create_keypoints_keep_instance_predicate, + _maybe_create_mask_keep_instance_predicate, + _maybe_create_densepose_keep_instance_predicate, + ] + predicates = [creator(cfg) for creator in specific_predicate_creators] + predicates = [p for p in predicates if p is not None] + if not predicates: + return None + + def combined_predicate(instance: Instance) -> bool: + return any(p(instance) for p in predicates) + + return combined_predicate + + +def _get_train_keep_instance_predicate(cfg: CfgNode): + general_keep_predicate = _maybe_create_general_keep_instance_predicate(cfg) + combined_specific_keep_predicate = _maybe_create_specific_keep_instance_predicate(cfg) + + def combined_general_specific_keep_predicate(instance: Instance) -> bool: + return general_keep_predicate(instance) and combined_specific_keep_predicate(instance) + + if (general_keep_predicate is None) and (combined_specific_keep_predicate is None): + return None + if general_keep_predicate is None: + return combined_specific_keep_predicate + if combined_specific_keep_predicate is None: + return general_keep_predicate + return combined_general_specific_keep_predicate + + +def _get_test_keep_instance_predicate(cfg: CfgNode): + general_keep_predicate = _maybe_create_general_keep_instance_predicate(cfg) + return general_keep_predicate + + +def _maybe_filter_and_map_categories( + dataset_name: str, dataset_dicts: List[Instance] +) -> List[Instance]: + meta = MetadataCatalog.get(dataset_name) + whitelisted_categories = meta.get("whitelisted_categories") + category_map = meta.get("category_map", {}) + if whitelisted_categories is None and not category_map: + return dataset_dicts + filtered_dataset_dicts = [] + for dataset_dict in dataset_dicts: + anns = [] + for ann in dataset_dict["annotations"]: + cat_id = ann["category_id"] + if whitelisted_categories is not None and cat_id not in 
whitelisted_categories: + continue + ann["category_id"] = category_map.get(cat_id, cat_id) + anns.append(ann) + dataset_dict["annotations"] = anns + filtered_dataset_dicts.append(dataset_dict) + return filtered_dataset_dicts + + +def _add_category_whitelists_to_metadata(cfg: CfgNode): + for dataset_name, whitelisted_cat_ids in cfg.DATASETS.WHITELISTED_CATEGORIES.items(): + meta = MetadataCatalog.get(dataset_name) + meta.whitelisted_categories = whitelisted_cat_ids + logger = logging.getLogger(__name__) + logger.info( + "Whitelisted categories for dataset {}: {}".format( + dataset_name, meta.whitelisted_categories + ) + ) + + +def _add_category_maps_to_metadata(cfg: CfgNode): + for dataset_name, category_map in cfg.DATASETS.CATEGORY_MAPS.items(): + category_map = { + int(cat_id_src): int(cat_id_dst) for cat_id_src, cat_id_dst in category_map.items() + } + meta = MetadataCatalog.get(dataset_name) + meta.category_map = category_map + logger = logging.getLogger(__name__) + logger.info("Category maps for dataset {}: {}".format(dataset_name, meta.category_map)) + + +def combine_detection_dataset_dicts( + dataset_names: Collection[str], + keep_instance_predicate: Optional[InstancePredicate] = None, + proposal_files: Optional[Collection[str]] = None, +) -> List[Instance]: + """ + Load and prepare dataset dicts for training / testing + + Args: + dataset_names (Collection[str]): a list of dataset names + keep_instance_predicate (Callable: Dict[str, Any] -> bool): predicate + applied to instance dicts which defines whether to keep the instance + proposal_files (Collection[str]): if given, a list of object proposal files + that match each dataset in `dataset_names`. + """ + assert len(dataset_names) + if proposal_files is None: + proposal_files = [None] * len(dataset_names) + assert len(dataset_names) == len(proposal_files) + # load annotations and dataset metadata + dataset_map = {} + for dataset_name in dataset_names: + dataset_dicts = DatasetCatalog.get(dataset_name) + dataset_map[dataset_name] = dataset_dicts + # initialize category maps + _add_category_id_to_contiguous_id_maps_to_metadata(dataset_names) + # apply category maps + all_datasets_dicts = [] + for dataset_name, proposal_file in zip(dataset_names, proposal_files): + dataset_dicts = dataset_map[dataset_name] + assert len(dataset_dicts), f"Dataset '{dataset_name}' is empty!" + if proposal_file is not None: + dataset_dicts = load_proposals_into_dataset(dataset_dicts, proposal_file) + dataset_dicts = _maybe_filter_and_map_categories(dataset_name, dataset_dicts) + _map_category_id_to_contiguous_id(dataset_name, dataset_dicts) + print_instances_class_histogram( + dataset_dicts, MetadataCatalog.get(dataset_name).thing_classes + ) + all_datasets_dicts.append(dataset_dicts) + + if keep_instance_predicate is not None: + all_datasets_dicts_plain = [ + d + for d in itertools.chain.from_iterable(all_datasets_dicts) + if keep_instance_predicate(d) + ] + else: + all_datasets_dicts_plain = list(itertools.chain.from_iterable(all_datasets_dicts)) + return all_datasets_dicts_plain + + +def build_detection_train_loader(cfg: CfgNode, mapper=None): + """ + A data loader is created in a way similar to that of Detectron2. + The main differences are: + - it allows to combine data with different but compatible object category sets + + The data loader is created by the following steps: + 1. Use the dataset names in config to query :class:`DatasetCatalog`, and obtain a list of dicts. + 2. Start workers to work on the dicts. 
Each worker will: + * Map each metadata dict into another format to be consumed by the model. + * Batch them by simply putting dicts into a list. + The batched ``list[mapped_dict]`` is what this dataloader will return. + + Args: + cfg (CfgNode): the config + mapper (callable): a callable which takes a sample (dict) from dataset and + returns the format to be consumed by the model. + By default it will be `DatasetMapper(cfg, True)`. + + Returns: + an infinite iterator of training data + """ + images_per_worker = _compute_num_images_per_worker(cfg) + + _add_category_whitelists_to_metadata(cfg) + _add_category_maps_to_metadata(cfg) + dataset_dicts = combine_detection_dataset_dicts( + cfg.DATASETS.TRAIN, + keep_instance_predicate=_get_train_keep_instance_predicate(cfg), + proposal_files=cfg.DATASETS.PROPOSAL_FILES_TRAIN if cfg.MODEL.LOAD_PROPOSALS else None, + ) + dataset = DatasetFromList(dataset_dicts, copy=False) + + if mapper is None: + mapper = DatasetMapper(cfg, True) + dataset = MapDataset(dataset, mapper) + + sampler_name = cfg.DATALOADER.SAMPLER_TRAIN + logger = logging.getLogger(__name__) + logger.info("Using training sampler {}".format(sampler_name)) + if sampler_name == "TrainingSampler": + sampler = samplers.TrainingSampler(len(dataset)) + elif sampler_name == "RepeatFactorTrainingSampler": + sampler = samplers.RepeatFactorTrainingSampler( + dataset_dicts, cfg.DATALOADER.REPEAT_THRESHOLD + ) + else: + raise ValueError("Unknown training sampler: {}".format(sampler_name)) + + if cfg.DATALOADER.ASPECT_RATIO_GROUPING: + data_loader = torch.utils.data.DataLoader( + dataset, + sampler=sampler, + num_workers=cfg.DATALOADER.NUM_WORKERS, + batch_sampler=None, + collate_fn=operator.itemgetter(0), # don't batch, but yield individual elements + worker_init_fn=worker_init_reset_seed, + ) # yield individual mapped dict + data_loader = AspectRatioGroupedDataset(data_loader, images_per_worker) + else: + batch_sampler = torch.utils.data.sampler.BatchSampler( + sampler, images_per_worker, drop_last=True + ) + # drop_last so the batch always have the same size + data_loader = torch.utils.data.DataLoader( + dataset, + num_workers=cfg.DATALOADER.NUM_WORKERS, + batch_sampler=batch_sampler, + collate_fn=trivial_batch_collator, + worker_init_fn=worker_init_reset_seed, + ) + + return data_loader + + +def build_detection_test_loader(cfg, dataset_name, mapper=None): + """ + Similar to `build_detection_train_loader`. + But this function uses the given `dataset_name` argument (instead of the names in cfg), + and uses batch size 1. + + Args: + cfg: a detectron2 CfgNode + dataset_name (str): a name of the dataset that's available in the DatasetCatalog + mapper (callable): a callable which takes a sample (dict) from dataset + and returns the format to be consumed by the model. + By default it will be `DatasetMapper(cfg, False)`. + + Returns: + DataLoader: a torch DataLoader, that loads the given detection + dataset, with test-time transformation and batching. 
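+ + Example (illustrative usage only; it assumes the data registered by + densepose.data.datasets.builtin are present on disk and that cfg already + contains the options added by add_densepose_config / add_dataset_category_config): + + loader = build_detection_test_loader(cfg, "densepose_coco_2014_minival_100") + for inputs in loader: # each batch holds a single image dict + outputs = model(inputs)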
+ """ + _add_category_whitelists_to_metadata(cfg) + _add_category_maps_to_metadata(cfg) + dataset_dicts = combine_detection_dataset_dicts( + [dataset_name], + keep_instance_predicate=_get_test_keep_instance_predicate(cfg), + proposal_files=[ + cfg.DATASETS.PROPOSAL_FILES_TEST[list(cfg.DATASETS.TEST).index(dataset_name)] + ] + if cfg.MODEL.LOAD_PROPOSALS + else None, + ) + + dataset = DatasetFromList(dataset_dicts) + if mapper is None: + mapper = DatasetMapper(cfg, False) + dataset = MapDataset(dataset, mapper) + + sampler = samplers.InferenceSampler(len(dataset)) + # Always use 1 image per worker during inference since this is the + # standard when reporting inference time in papers. + batch_sampler = torch.utils.data.sampler.BatchSampler(sampler, 1, drop_last=False) + + data_loader = torch.utils.data.DataLoader( + dataset, + num_workers=cfg.DATALOADER.NUM_WORKERS, + batch_sampler=batch_sampler, + collate_fn=trivial_batch_collator, + ) + return data_loader diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/dataset_mapper.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/dataset_mapper.py new file mode 100644 index 0000000000000000000000000000000000000000..f74976745151952ece06c7b7ba542e0b63f53899 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/dataset_mapper.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import copy +import torch +from fvcore.common.file_io import PathManager + +from detectron2.data import MetadataCatalog +from detectron2.data import detection_utils as utils +from detectron2.data import transforms as T + +from .structures import DensePoseDataRelative, DensePoseList, DensePoseTransformData + + +class DatasetMapper: + """ + A customized version of `detectron2.data.DatasetMapper` + """ + + def __init__(self, cfg, is_train=True): + self.tfm_gens = utils.build_transform_gen(cfg, is_train) + + # fmt: off + self.img_format = cfg.INPUT.FORMAT + self.mask_on = cfg.MODEL.MASK_ON + self.keypoint_on = cfg.MODEL.KEYPOINT_ON + self.densepose_on = cfg.MODEL.DENSEPOSE_ON + assert not cfg.MODEL.LOAD_PROPOSALS, "not supported yet" + # fmt: on + if self.keypoint_on and is_train: + # Flip only makes sense in training + self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN) + else: + self.keypoint_hflip_indices = None + + if self.densepose_on: + densepose_transform_srcs = [ + MetadataCatalog.get(ds).densepose_transform_src + for ds in cfg.DATASETS.TRAIN + cfg.DATASETS.TEST + ] + assert len(densepose_transform_srcs) > 0 + # TODO: check that DensePose transformation data is the same for + # all the data. Otherwise one would have to pass DB ID with + # each entry to select proper transformation data. For now, since + # all DensePose annotated data uses the same data semantics, we + # omit this check. + densepose_transform_data_fpath = PathManager.get_local_path(densepose_transform_srcs[0]) + self.densepose_transform_data = DensePoseTransformData.load( + densepose_transform_data_fpath + ) + + self.is_train = is_train + + def __call__(self, dataset_dict): + """ + Args: + dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format. 
+ + Returns: + dict: a format that builtin models in detectron2 accept + """ + dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below + image = utils.read_image(dataset_dict["file_name"], format=self.img_format) + utils.check_image_size(dataset_dict, image) + + image, transforms = T.apply_transform_gens(self.tfm_gens, image) + image_shape = image.shape[:2] # h, w + dataset_dict["image"] = torch.as_tensor(image.transpose(2, 0, 1).astype("float32")) + + if not self.is_train: + dataset_dict.pop("annotations", None) + return dataset_dict + + for anno in dataset_dict["annotations"]: + if not self.mask_on: + anno.pop("segmentation", None) + if not self.keypoint_on: + anno.pop("keypoints", None) + + # USER: Implement additional transformations if you have other types of data + # USER: Don't call transpose_densepose if you don't need + annos = [ + self._transform_densepose( + utils.transform_instance_annotations( + obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices + ), + transforms, + ) + for obj in dataset_dict.pop("annotations") + if obj.get("iscrowd", 0) == 0 + ] + instances = utils.annotations_to_instances(annos, image_shape) + + if len(annos) and "densepose" in annos[0]: + gt_densepose = [obj["densepose"] for obj in annos] + instances.gt_densepose = DensePoseList(gt_densepose, instances.gt_boxes, image_shape) + + dataset_dict["instances"] = instances[instances.gt_boxes.nonempty()] + return dataset_dict + + def _transform_densepose(self, annotation, transforms): + if not self.densepose_on: + return annotation + + # Handle densepose annotations + is_valid, reason_not_valid = DensePoseDataRelative.validate_annotation(annotation) + if is_valid: + densepose_data = DensePoseDataRelative(annotation, cleanup=True) + densepose_data.apply_transform(transforms, self.densepose_transform_data) + annotation["densepose"] = densepose_data + else: + # logger = logging.getLogger(__name__) + # logger.debug("Could not load DensePose annotation: {}".format(reason_not_valid)) + DensePoseDataRelative.cleanup_annotation(annotation) + # NOTE: annotations for certain instances may be unavailable. + # 'None' is accepted by the DensePostList data structure. + annotation["densepose"] = None + return annotation diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4a59d9332034e9dc3a09f0ba7aa63f0c61b25e87 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from . 
import builtin # ensure the builtin data are registered + +__all__ = [k for k in globals().keys() if "builtin" not in k and not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/builtin.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/builtin.py new file mode 100644 index 0000000000000000000000000000000000000000..e70f3d3e006d1801dcfb743c9c21b46ca54a3053 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/builtin.py @@ -0,0 +1,10 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .coco import BASE_DATASETS as BASE_COCO_DATASETS +from .coco import DATASETS as COCO_DATASETS +from .coco import register_datasets as register_coco_datasets + +DEFAULT_DATASETS_ROOT = "data" + + +register_coco_datasets(COCO_DATASETS, DEFAULT_DATASETS_ROOT) +register_coco_datasets(BASE_COCO_DATASETS, DEFAULT_DATASETS_ROOT) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/coco.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3a96474fc990129d5c92786f62720621de97b230 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/datasets/coco.py @@ -0,0 +1,314 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import contextlib +import io +import logging +import os +from dataclasses import dataclass +from typing import Any, Dict, Iterable, List, Optional +from fvcore.common.file_io import PathManager +from fvcore.common.timer import Timer + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.structures import BoxMode + +DENSEPOSE_MASK_KEY = "dp_masks" +DENSEPOSE_KEYS_WITHOUT_MASK = ["dp_x", "dp_y", "dp_I", "dp_U", "dp_V"] +DENSEPOSE_KEYS = DENSEPOSE_KEYS_WITHOUT_MASK + [DENSEPOSE_MASK_KEY] +DENSEPOSE_METADATA_URL_PREFIX = "https://dl.fbaipublicfiles.com/densepose/data/" + + +@dataclass +class CocoDatasetInfo: + name: str + images_root: str + annotations_fpath: str + + +DATASETS = [ + CocoDatasetInfo( + name="densepose_coco_2014_train", + images_root="coco/train2014", + annotations_fpath="coco/annotations/densepose_train2014.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_minival", + images_root="coco/val2014", + annotations_fpath="coco/annotations/densepose_minival2014.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_minival_100", + images_root="coco/val2014", + annotations_fpath="coco/annotations/densepose_minival2014_100.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_valminusminival", + images_root="coco/val2014", + annotations_fpath="coco/annotations/densepose_valminusminival2014.json", + ), + CocoDatasetInfo( + name="densepose_chimps", + images_root="densepose_evolution/densepose_chimps", + annotations_fpath="densepose_evolution/annotations/densepose_chimps_densepose.json", + ), +] + + +BASE_DATASETS = [ + CocoDatasetInfo( + name="base_coco_2017_train", + images_root="coco/train2017", + annotations_fpath="coco/annotations/instances_train2017.json", + ), + CocoDatasetInfo( + name="base_coco_2017_val", + images_root="coco/val2017", + annotations_fpath="coco/annotations/instances_val2017.json", + ), + 
CocoDatasetInfo( + name="base_coco_2017_val_100", + images_root="coco/val2017", + annotations_fpath="coco/annotations/instances_val2017_100.json", + ), +] + + +def _is_relative_local_path(path: os.PathLike): + path_str = os.fsdecode(path) + return ("://" not in path_str) and not os.path.isabs(path) + + +def _maybe_prepend_base_path(base_path: Optional[os.PathLike], path: os.PathLike): + """ + Prepends the provided path with a base path prefix if: + 1) base path is not None; + 2) path is a local path + """ + if base_path is None: + return path + if _is_relative_local_path(path): + return os.path.join(base_path, path) + return path + + +def get_metadata(base_path: Optional[os.PathLike]) -> Dict[str, Any]: + """ + Returns metadata associated with COCO DensePose data + + Args: + base_path: Optional[os.PathLike] + Base path used to load metadata from + + Returns: + Dict[str, Any] + Metadata in the form of a dictionary + """ + meta = { + "densepose_transform_src": _maybe_prepend_base_path( + base_path, "UV_symmetry_transforms.mat" + ), + "densepose_smpl_subdiv": _maybe_prepend_base_path(base_path, "SMPL_subdiv.mat"), + "densepose_smpl_subdiv_transform": _maybe_prepend_base_path( + base_path, "SMPL_SUBDIV_TRANSFORM.mat" + ), + } + return meta + + +def _load_coco_annotations(json_file: str): + """ + Load COCO annotations from a JSON file + + Args: + json_file: str + Path to the file to load annotations from + Returns: + Instance of `pycocotools.coco.COCO` that provides access to annotations + data + """ + from pycocotools.coco import COCO + + logger = logging.getLogger(__name__) + timer = Timer() + with contextlib.redirect_stdout(io.StringIO()): + coco_api = COCO(json_file) + if timer.seconds() > 1: + logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds())) + return coco_api + + +def _add_categories_metadata(dataset_name: str, categories: Dict[str, Any]): + meta = MetadataCatalog.get(dataset_name) + meta.categories = {c["id"]: c["name"] for c in categories} + logger = logging.getLogger(__name__) + logger.info("Dataset {} categories: {}".format(dataset_name, categories)) + + +def _verify_annotations_have_unique_ids(json_file: str, anns: List[List[Dict[str, Any]]]): + if "minival" in json_file: + # Skip validation on COCO2014 valminusminival and minival annotations + # The ratio of buggy annotations there is tiny and does not affect accuracy + # Therefore we explicitly white-list them + return + ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image] + assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique!".format( + json_file + ) + + +def _maybe_add_bbox(obj: Dict[str, Any], ann_dict: Dict[str, Any]): + if "bbox" not in ann_dict: + return + obj["bbox"] = ann_dict["bbox"] + obj["bbox_mode"] = BoxMode.XYWH_ABS + + +def _maybe_add_segm(obj: Dict[str, Any], ann_dict: Dict[str, Any]): + if "segmentation" not in ann_dict: + return + segm = ann_dict["segmentation"] + if not isinstance(segm, dict): + # filter out invalid polygons (< 3 points) + segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6] + if len(segm) == 0: + return + obj["segmentation"] = segm + + +def _maybe_add_keypoints(obj: Dict[str, Any], ann_dict: Dict[str, Any]): + if "keypoints" not in ann_dict: + return + keypts = ann_dict["keypoints"] # list[int] + for idx, v in enumerate(keypts): + if idx % 3 != 2: + # COCO's segmentation coordinates are floating points in [0, H or W], + # but keypoint coordinates are integers in [0, H-1 or W-1] + # Therefore we 
assume the coordinates are "pixel indices" and + # add 0.5 to convert to floating point coordinates. + keypts[idx] = v + 0.5 + obj["keypoints"] = keypts + + +def _maybe_add_densepose(obj: Dict[str, Any], ann_dict: Dict[str, Any]): + for key in DENSEPOSE_KEYS: + if key in ann_dict: + obj[key] = ann_dict[key] + + +def _combine_images_with_annotations( + dataset_name: str, + image_root: str, + img_datas: Iterable[Dict[str, Any]], + ann_datas: Iterable[Iterable[Dict[str, Any]]], +): + + ann_keys = ["iscrowd", "category_id"] + dataset_dicts = [] + + for img_dict, ann_dicts in zip(img_datas, ann_datas): + record = {} + record["file_name"] = os.path.join(image_root, img_dict["file_name"]) + record["height"] = img_dict["height"] + record["width"] = img_dict["width"] + record["image_id"] = img_dict["id"] + record["dataset"] = dataset_name + objs = [] + for ann_dict in ann_dicts: + assert ann_dict["image_id"] == record["image_id"] + assert ann_dict.get("ignore", 0) == 0 + obj = {key: ann_dict[key] for key in ann_keys if key in ann_dict} + _maybe_add_bbox(obj, ann_dict) + _maybe_add_segm(obj, ann_dict) + _maybe_add_keypoints(obj, ann_dict) + _maybe_add_densepose(obj, ann_dict) + objs.append(obj) + record["annotations"] = objs + dataset_dicts.append(record) + return dataset_dicts + + +def load_coco_json(annotations_json_file: str, image_root: str, dataset_name: str): + """ + Loads a JSON file with annotations in COCO instances format. + Replaces `detectron2.data.data.coco.load_coco_json` to handle metadata + in a more flexible way. Postpones category mapping to a later stage to be + able to combine several data with different (but coherent) sets of + categories. + + Args: + + annotations_json_file: str + Path to the JSON file with annotations in COCO instances format. + image_root: str + directory that contains all the images + dataset_name: str + the name that identifies a dataset, e.g. "densepose_coco_2014_train" + extra_annotation_keys: Optional[List[str]] + If provided, these keys are used to extract additional data from + the annotations. + """ + coco_api = _load_coco_annotations(PathManager.get_local_path(annotations_json_file)) + _add_categories_metadata(dataset_name, coco_api.loadCats(coco_api.getCatIds())) + # sort indices for reproducible results + img_ids = sorted(coco_api.imgs.keys()) + # imgs is a list of dicts, each looks something like: + # {'license': 4, + # 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg', + # 'file_name': 'COCO_val2014_000000001268.jpg', + # 'height': 427, + # 'width': 640, + # 'date_captured': '2013-11-17 05:57:24', + # 'id': 1268} + imgs = coco_api.loadImgs(img_ids) + logger = logging.getLogger(__name__) + logger.info("Loaded {} images in COCO format from {}".format(len(imgs), annotations_json_file)) + # anns is a list[list[dict]], where each dict is an annotation + # record for an object. The inner list enumerates the objects in an image + # and the outer list enumerates over images. 
+ anns = [coco_api.imgToAnns[img_id] for img_id in img_ids] + _verify_annotations_have_unique_ids(annotations_json_file, anns) + dataset_records = _combine_images_with_annotations(dataset_name, image_root, imgs, anns) + return dataset_records + + +def register_dataset(dataset_data: CocoDatasetInfo, datasets_root: Optional[os.PathLike] = None): + """ + Registers provided COCO DensePose dataset + + Args: + dataset_data: CocoDatasetInfo + Dataset data + datasets_root: Optional[os.PathLike] + Datasets root folder (default: None) + """ + annotations_fpath = _maybe_prepend_base_path(datasets_root, dataset_data.annotations_fpath) + images_root = _maybe_prepend_base_path(datasets_root, dataset_data.images_root) + + def load_annotations(): + return load_coco_json( + annotations_json_file=annotations_fpath, + image_root=images_root, + dataset_name=dataset_data.name, + ) + + DatasetCatalog.register(dataset_data.name, load_annotations) + MetadataCatalog.get(dataset_data.name).set( + json_file=annotations_fpath, + image_root=images_root, + **get_metadata(DENSEPOSE_METADATA_URL_PREFIX) + ) + + +def register_datasets( + datasets_data: Iterable[CocoDatasetInfo], datasets_root: Optional[os.PathLike] = None +): + """ + Registers provided COCO DensePose data + + Args: + datasets_data: Iterable[CocoDatasetInfo] + An iterable of dataset datas + datasets_root: Optional[os.PathLike] + Datasets root folder (default: None) + """ + for dataset_data in datasets_data: + register_dataset(dataset_data, datasets_root) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/structures.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/structures.py new file mode 100644 index 0000000000000000000000000000000000000000..bbb950ba09b1302b72f36d143e092d2ade6dc11e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/data/structures.py @@ -0,0 +1,579 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import base64 +import numpy as np +from io import BytesIO +import torch +from PIL import Image +from torch.nn import functional as F + + +class DensePoseTransformData(object): + + # Horizontal symmetry label transforms used for horizontal flip + MASK_LABEL_SYMMETRIES = [0, 1, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 14] + # fmt: off + POINT_LABEL_SYMMETRIES = [ 0, 1, 2, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15, 18, 17, 20, 19, 22, 21, 24, 23] # noqa + # fmt: on + + def __init__(self, uv_symmetries): + self.mask_label_symmetries = DensePoseTransformData.MASK_LABEL_SYMMETRIES + self.point_label_symmetries = DensePoseTransformData.POINT_LABEL_SYMMETRIES + self.uv_symmetries = uv_symmetries + + @staticmethod + def load(fpath): + import scipy.io + + uv_symmetry_map = scipy.io.loadmat(fpath) + uv_symmetry_map_torch = {} + for key in ["U_transforms", "V_transforms"]: + uv_symmetry_map_torch[key] = [] + map_src = uv_symmetry_map[key] + map_dst = uv_symmetry_map_torch[key] + for i in range(map_src.shape[1]): + map_dst.append(torch.from_numpy(map_src[0, i]).to(dtype=torch.float)) + uv_symmetry_map_torch[key] = torch.stack(map_dst, dim=0).to( + device=torch.cuda.current_device() + ) + transform_data = DensePoseTransformData(uv_symmetry_map_torch) + return transform_data + + +class DensePoseDataRelative(object): + """ + Dense pose relative annotations that can be applied to any bounding box: + x - normalized X coordinates [0, 255] of annotated points + y - normalized Y coordinates [0, 255] of annotated points + i - body part labels 0,...,24 for annotated points + u - body part U coordinates [0, 1] for annotated points + v - body part V coordinates [0, 1] for annotated points + segm - 256x256 segmentation mask with values 0,...,14 + To obtain absolute x and y data wrt some bounding box one needs to first + divide the data by 256, multiply by the respective bounding box size + and add bounding box offset: + x_img = x0 + x_norm * w / 256.0 + y_img = y0 + y_norm * h / 256.0 + Segmentation masks are typically sampled to get image-based masks. 
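+ As an illustrative example (numbers are made up, not taken from any annotation): + for a bounding box (x0, y0, w, h) = (10, 20, 128, 256), an annotated point with + x = 128 and y = 64 maps to x_img = 10 + 128 * 128 / 256.0 = 74.0 and + y_img = 20 + 64 * 256 / 256.0 = 84.0.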
+ """ + + # Key for normalized X coordinates in annotation dict + X_KEY = "dp_x" + # Key for normalized Y coordinates in annotation dict + Y_KEY = "dp_y" + # Key for U part coordinates in annotation dict + U_KEY = "dp_U" + # Key for V part coordinates in annotation dict + V_KEY = "dp_V" + # Key for I point labels in annotation dict + I_KEY = "dp_I" + # Key for segmentation mask in annotation dict + S_KEY = "dp_masks" + # Number of body parts in segmentation masks + N_BODY_PARTS = 14 + # Number of parts in point labels + N_PART_LABELS = 24 + MASK_SIZE = 256 + + def __init__(self, annotation, cleanup=False): + is_valid, reason_not_valid = DensePoseDataRelative.validate_annotation(annotation) + assert is_valid, "Invalid DensePose annotations: {}".format(reason_not_valid) + self.x = torch.as_tensor(annotation[DensePoseDataRelative.X_KEY]) + self.y = torch.as_tensor(annotation[DensePoseDataRelative.Y_KEY]) + self.i = torch.as_tensor(annotation[DensePoseDataRelative.I_KEY]) + self.u = torch.as_tensor(annotation[DensePoseDataRelative.U_KEY]) + self.v = torch.as_tensor(annotation[DensePoseDataRelative.V_KEY]) + self.segm = DensePoseDataRelative.extract_segmentation_mask(annotation) + self.device = torch.device("cpu") + if cleanup: + DensePoseDataRelative.cleanup_annotation(annotation) + + def to(self, device): + if self.device == device: + return self + new_data = DensePoseDataRelative.__new__(DensePoseDataRelative) + new_data.x = self.x + new_data.x = self.x.to(device) + new_data.y = self.y.to(device) + new_data.i = self.i.to(device) + new_data.u = self.u.to(device) + new_data.v = self.v.to(device) + new_data.segm = self.segm.to(device) + new_data.device = device + return new_data + + @staticmethod + def extract_segmentation_mask(annotation): + import pycocotools.mask as mask_utils + + poly_specs = annotation[DensePoseDataRelative.S_KEY] + segm = torch.zeros((DensePoseDataRelative.MASK_SIZE,) * 2, dtype=torch.float32) + for i in range(DensePoseDataRelative.N_BODY_PARTS): + poly_i = poly_specs[i] + if poly_i: + mask_i = mask_utils.decode(poly_i) + segm[mask_i > 0] = i + 1 + return segm + + @staticmethod + def validate_annotation(annotation): + for key in [ + DensePoseDataRelative.X_KEY, + DensePoseDataRelative.Y_KEY, + DensePoseDataRelative.I_KEY, + DensePoseDataRelative.U_KEY, + DensePoseDataRelative.V_KEY, + DensePoseDataRelative.S_KEY, + ]: + if key not in annotation: + return False, "no {key} data in the annotation".format(key=key) + return True, None + + @staticmethod + def cleanup_annotation(annotation): + for key in [ + DensePoseDataRelative.X_KEY, + DensePoseDataRelative.Y_KEY, + DensePoseDataRelative.I_KEY, + DensePoseDataRelative.U_KEY, + DensePoseDataRelative.V_KEY, + DensePoseDataRelative.S_KEY, + ]: + if key in annotation: + del annotation[key] + + def apply_transform(self, transforms, densepose_transform_data): + self._transform_pts(transforms, densepose_transform_data) + self._transform_segm(transforms, densepose_transform_data) + + def _transform_pts(self, transforms, dp_transform_data): + import detectron2.data.transforms as T + + # NOTE: This assumes that HorizFlipTransform is the only one that does flip + do_hflip = sum(isinstance(t, T.HFlipTransform) for t in transforms.transforms) % 2 == 1 + if do_hflip: + self.x = self.segm.size(1) - self.x + self._flip_iuv_semantics(dp_transform_data) + + def _flip_iuv_semantics(self, dp_transform_data: DensePoseTransformData) -> None: + i_old = self.i.clone() + uv_symmetries = dp_transform_data.uv_symmetries + pt_label_symmetries = 
dp_transform_data.point_label_symmetries + for i in range(self.N_PART_LABELS): + if i + 1 in i_old: + annot_indices_i = i_old == i + 1 + if pt_label_symmetries[i + 1] != i + 1: + self.i[annot_indices_i] = pt_label_symmetries[i + 1] + u_loc = (self.u[annot_indices_i] * 255).long() + v_loc = (self.v[annot_indices_i] * 255).long() + self.u[annot_indices_i] = uv_symmetries["U_transforms"][i][v_loc, u_loc].to( + device=self.u.device + ) + self.v[annot_indices_i] = uv_symmetries["V_transforms"][i][v_loc, u_loc].to( + device=self.v.device + ) + + def _transform_segm(self, transforms, dp_transform_data): + import detectron2.data.transforms as T + + # NOTE: This assumes that HorizFlipTransform is the only one that does flip + do_hflip = sum(isinstance(t, T.HFlipTransform) for t in transforms.transforms) % 2 == 1 + if do_hflip: + self.segm = torch.flip(self.segm, [1]) + self._flip_segm_semantics(dp_transform_data) + + def _flip_segm_semantics(self, dp_transform_data): + old_segm = self.segm.clone() + mask_label_symmetries = dp_transform_data.mask_label_symmetries + for i in range(self.N_BODY_PARTS): + if mask_label_symmetries[i + 1] != i + 1: + self.segm[old_segm == i + 1] = mask_label_symmetries[i + 1] + + +def normalized_coords_transform(x0, y0, w, h): + """ + Coordinates transform that maps top left corner to (-1, -1) and bottom + right corner to (1, 1). Used for torch.grid_sample to initialize the + grid + """ + + def f(p): + return (2 * (p[0] - x0) / w - 1, 2 * (p[1] - y0) / h - 1) + + return f + + +class DensePoseOutput(object): + def __init__(self, S, I, U, V, confidences): + """ + Args: + S (`torch.Tensor`): coarse segmentation tensor of size (N, A, H, W) + I (`torch.Tensor`): fine segmentation tensor of size (N, C, H, W) + U (`torch.Tensor`): U coordinates for each fine segmentation label of size (N, C, H, W) + V (`torch.Tensor`): V coordinates for each fine segmentation label of size (N, C, H, W) + confidences (dict of str -> `torch.Tensor`) estimated confidence model parameters + """ + self.S = S + self.I = I # noqa: E741 + self.U = U + self.V = V + self.confidences = confidences + self._check_output_dims(S, I, U, V) + + def _check_output_dims(self, S, I, U, V): + assert ( + len(S.size()) == 4 + ), "Segmentation output should have 4 " "dimensions (NCHW), but has size {}".format( + S.size() + ) + assert ( + len(I.size()) == 4 + ), "Segmentation output should have 4 " "dimensions (NCHW), but has size {}".format( + S.size() + ) + assert ( + len(U.size()) == 4 + ), "Segmentation output should have 4 " "dimensions (NCHW), but has size {}".format( + S.size() + ) + assert ( + len(V.size()) == 4 + ), "Segmentation output should have 4 " "dimensions (NCHW), but has size {}".format( + S.size() + ) + assert len(S) == len(I), ( + "Number of output segmentation planes {} " + "should be equal to the number of output part index " + "planes {}".format(len(S), len(I)) + ) + assert S.size()[2:] == I.size()[2:], ( + "Output segmentation plane size {} " + "should be equal to the output part index " + "plane size {}".format(S.size()[2:], I.size()[2:]) + ) + assert I.size() == U.size(), ( + "Part index output shape {} " + "should be the same as U coordinates output shape {}".format(I.size(), U.size()) + ) + assert I.size() == V.size(), ( + "Part index output shape {} " + "should be the same as V coordinates output shape {}".format(I.size(), V.size()) + ) + + def resize(self, image_size_hw): + # do nothing - outputs are invariant to resize + pass + + def _crop(self, S, I, U, V, bbox_old_xywh, bbox_new_xywh): + 
""" + Resample S, I, U, V from bbox_old to the cropped bbox_new + """ + x0old, y0old, wold, hold = bbox_old_xywh + x0new, y0new, wnew, hnew = bbox_new_xywh + tr_coords = normalized_coords_transform(x0old, y0old, wold, hold) + topleft = (x0new, y0new) + bottomright = (x0new + wnew, y0new + hnew) + topleft_norm = tr_coords(topleft) + bottomright_norm = tr_coords(bottomright) + hsize = S.size(1) + wsize = S.size(2) + grid = torch.meshgrid( + torch.arange( + topleft_norm[1], + bottomright_norm[1], + (bottomright_norm[1] - topleft_norm[1]) / hsize, + )[:hsize], + torch.arange( + topleft_norm[0], + bottomright_norm[0], + (bottomright_norm[0] - topleft_norm[0]) / wsize, + )[:wsize], + ) + grid = torch.stack(grid, dim=2).to(S.device) + assert ( + grid.size(0) == hsize + ), "Resampled grid expected " "height={}, actual height={}".format(hsize, grid.size(0)) + assert grid.size(1) == wsize, "Resampled grid expected " "width={}, actual width={}".format( + wsize, grid.size(1) + ) + S_new = F.grid_sample( + S.unsqueeze(0), + torch.unsqueeze(grid, 0), + mode="bilinear", + padding_mode="border", + align_corners=True, + ).squeeze(0) + I_new = F.grid_sample( + I.unsqueeze(0), + torch.unsqueeze(grid, 0), + mode="bilinear", + padding_mode="border", + align_corners=True, + ).squeeze(0) + U_new = F.grid_sample( + U.unsqueeze(0), + torch.unsqueeze(grid, 0), + mode="bilinear", + padding_mode="border", + align_corners=True, + ).squeeze(0) + V_new = F.grid_sample( + V.unsqueeze(0), + torch.unsqueeze(grid, 0), + mode="bilinear", + padding_mode="border", + align_corners=True, + ).squeeze(0) + return S_new, I_new, U_new, V_new + + def crop(self, indices_cropped, bboxes_old, bboxes_new): + """ + Crop outputs for selected bounding boxes to the new bounding boxes. + """ + # VK: cropping is ignored for now + # for i, ic in enumerate(indices_cropped): + # self.S[ic], self.I[ic], self.U[ic], self.V[ic] = \ + # self._crop(self.S[ic], self.I[ic], self.U[ic], self.V[ic], + # bboxes_old[i], bboxes_new[i]) + pass + + def hflip(self, transform_data: DensePoseTransformData) -> None: + """ + Change S, I, U and V to take into account a Horizontal flip. + """ + if self.I.shape[0] > 0: + for el in "SIUV": + self.__dict__[el] = torch.flip(self.__dict__[el], [3]) + self._flip_iuv_semantics_tensor(transform_data) + self._flip_segm_semantics_tensor(transform_data) + + def _flip_iuv_semantics_tensor(self, dp_transform_data: DensePoseTransformData) -> None: + point_label_symmetries = dp_transform_data.point_label_symmetries + uv_symmetries = dp_transform_data.uv_symmetries + + N, C, H, W = self.U.shape + u_loc = (self.U[:, 1:, :, :].clamp(0, 1) * 255).long() + v_loc = (self.V[:, 1:, :, :].clamp(0, 1) * 255).long() + Iindex = torch.arange(C - 1, device=self.U.device)[None, :, None, None].expand( + N, C - 1, H, W + ) + self.U[:, 1:, :, :] = uv_symmetries["U_transforms"][Iindex, v_loc, u_loc].to( + device=self.U.device + ) + self.V[:, 1:, :, :] = uv_symmetries["V_transforms"][Iindex, v_loc, u_loc].to( + device=self.V.device + ) + + for el in "IUV": + self.__dict__[el] = self.__dict__[el][:, point_label_symmetries, :, :] + + def _flip_segm_semantics_tensor(self, dp_transform_data): + if self.S.shape[1] == DensePoseDataRelative.N_BODY_PARTS + 1: + self.S = self.S[:, dp_transform_data.mask_label_symmetries, :, :] + + def to_result(self, boxes_xywh): + """ + Convert DensePose outputs to results format. 
Results are more compact, + but cannot be resampled any more + """ + result = DensePoseResult(boxes_xywh, self.S, self.I, self.U, self.V) + return result + + def __getitem__(self, item): + if isinstance(item, int): + S_selected = self.S[item].unsqueeze(0) + I_selected = self.I[item].unsqueeze(0) + U_selected = self.U[item].unsqueeze(0) + V_selected = self.V[item].unsqueeze(0) + conf_selected = {} + for key in self.confidences: + conf_selected[key] = self.confidences[key][item].unsqueeze(0) + else: + S_selected = self.S[item] + I_selected = self.I[item] + U_selected = self.U[item] + V_selected = self.V[item] + conf_selected = {} + for key in self.confidences: + conf_selected[key] = self.confidences[key][item] + return DensePoseOutput(S_selected, I_selected, U_selected, V_selected, conf_selected) + + def __str__(self): + s = "DensePoseOutput S {}, I {}, U {}, V {}".format( + list(self.S.size()), list(self.I.size()), list(self.U.size()), list(self.V.size()) + ) + s_conf = "confidences: [{}]".format( + ", ".join([f"{key} {list(self.confidences[key].size())}" for key in self.confidences]) + ) + return ", ".join([s, s_conf]) + + def __len__(self): + return self.S.size(0) + + +class DensePoseResult(object): + def __init__(self, boxes_xywh, S, I, U, V): + self.results = [] + self.boxes_xywh = boxes_xywh.cpu().tolist() + assert len(boxes_xywh.size()) == 2 + assert boxes_xywh.size(1) == 4 + for i, box_xywh in enumerate(boxes_xywh): + result_i = self._output_to_result(box_xywh, S[[i]], I[[i]], U[[i]], V[[i]]) + result_numpy_i = result_i.cpu().numpy() + result_encoded_i = DensePoseResult.encode_png_data(result_numpy_i) + result_encoded_with_shape_i = (result_numpy_i.shape, result_encoded_i) + self.results.append(result_encoded_with_shape_i) + + def __str__(self): + s = "DensePoseResult: N={} [{}]".format( + len(self.results), ", ".join([str(list(r[0])) for r in self.results]) + ) + return s + + def _output_to_result(self, box_xywh, S, I, U, V): + x, y, w, h = box_xywh + w = max(int(w), 1) + h = max(int(h), 1) + result = torch.zeros([3, h, w], dtype=torch.uint8, device=U.device) + assert ( + len(S.size()) == 4 + ), "AnnIndex tensor size should have {} " "dimensions but has {}".format(4, len(S.size())) + s_bbox = F.interpolate(S, (h, w), mode="bilinear", align_corners=False).argmax(dim=1) + assert ( + len(I.size()) == 4 + ), "IndexUV tensor size should have {} " "dimensions but has {}".format(4, len(S.size())) + i_bbox = ( + F.interpolate(I, (h, w), mode="bilinear", align_corners=False).argmax(dim=1) + * (s_bbox > 0).long() + ).squeeze(0) + assert len(U.size()) == 4, "U tensor size should have {} " "dimensions but has {}".format( + 4, len(U.size()) + ) + u_bbox = F.interpolate(U, (h, w), mode="bilinear", align_corners=False) + assert len(V.size()) == 4, "V tensor size should have {} " "dimensions but has {}".format( + 4, len(V.size()) + ) + v_bbox = F.interpolate(V, (h, w), mode="bilinear", align_corners=False) + result[0] = i_bbox + for part_id in range(1, u_bbox.size(1)): + result[1][i_bbox == part_id] = ( + (u_bbox[0, part_id][i_bbox == part_id] * 255).clamp(0, 255).to(torch.uint8) + ) + result[2][i_bbox == part_id] = ( + (v_bbox[0, part_id][i_bbox == part_id] * 255).clamp(0, 255).to(torch.uint8) + ) + assert ( + result.size(1) == h + ), "Results height {} should be equal" "to bounding box height {}".format(result.size(1), h) + assert ( + result.size(2) == w + ), "Results width {} should be equal" "to bounding box width {}".format(result.size(2), w) + return result + + @staticmethod + def 
encode_png_data(arr): + """ + Encode array data as a PNG image using the highest compression rate + @param arr [in] Data stored in an array of size (3, M, N) of type uint8 + @return Base64-encoded string containing PNG-compressed data + """ + assert len(arr.shape) == 3, "Expected a 3D array as an input," " got a {0}D array".format( + len(arr.shape) + ) + assert arr.shape[0] == 3, "Expected first array dimension of size 3," " got {0}".format( + arr.shape[0] + ) + assert arr.dtype == np.uint8, "Expected an array of type np.uint8, " " got {0}".format( + arr.dtype + ) + data = np.moveaxis(arr, 0, -1) + im = Image.fromarray(data) + fstream = BytesIO() + im.save(fstream, format="png", optimize=True) + s = base64.encodebytes(fstream.getvalue()).decode() + return s + + @staticmethod + def decode_png_data(shape, s): + """ + Decode array data from a string that contains PNG-compressed data + @param Base64-encoded string containing PNG-compressed data + @return Data stored in an array of size (3, M, N) of type uint8 + """ + fstream = BytesIO(base64.decodebytes(s.encode())) + im = Image.open(fstream) + data = np.moveaxis(np.array(im.getdata(), dtype=np.uint8), -1, 0) + return data.reshape(shape) + + def __len__(self): + return len(self.results) + + def __getitem__(self, item): + result_encoded = self.results[item] + bbox_xywh = self.boxes_xywh[item] + return result_encoded, bbox_xywh + + +class DensePoseList(object): + + _TORCH_DEVICE_CPU = torch.device("cpu") + + def __init__(self, densepose_datas, boxes_xyxy_abs, image_size_hw, device=_TORCH_DEVICE_CPU): + assert len(densepose_datas) == len( + boxes_xyxy_abs + ), "Attempt to initialize DensePoseList with {} DensePose datas " "and {} boxes".format( + len(densepose_datas), len(boxes_xyxy_abs) + ) + self.densepose_datas = [] + for densepose_data in densepose_datas: + assert isinstance(densepose_data, DensePoseDataRelative) or densepose_data is None, ( + "Attempt to initialize DensePoseList with DensePose datas " + "of type {}, expected DensePoseDataRelative".format(type(densepose_data)) + ) + densepose_data_ondevice = ( + densepose_data.to(device) if densepose_data is not None else None + ) + self.densepose_datas.append(densepose_data_ondevice) + self.boxes_xyxy_abs = boxes_xyxy_abs.to(device) + self.image_size_hw = image_size_hw + self.device = device + + def to(self, device): + if self.device == device: + return self + return DensePoseList(self.densepose_datas, self.boxes_xyxy_abs, self.image_size_hw, device) + + def __iter__(self): + return iter(self.densepose_datas) + + def __len__(self): + return len(self.densepose_datas) + + def __repr__(self): + s = self.__class__.__name__ + "(" + s += "num_instances={}, ".format(len(self.densepose_datas)) + s += "image_width={}, ".format(self.image_size_hw[1]) + s += "image_height={})".format(self.image_size_hw[0]) + return s + + def __getitem__(self, item): + if isinstance(item, int): + densepose_data_rel = self.densepose_datas[item] + return densepose_data_rel + elif isinstance(item, slice): + densepose_datas_rel = self.densepose_datas[item] + boxes_xyxy_abs = self.boxes_xyxy_abs[item] + return DensePoseList( + densepose_datas_rel, boxes_xyxy_abs, self.image_size_hw, self.device + ) + elif isinstance(item, torch.Tensor) and (item.dtype == torch.bool): + densepose_datas_rel = [self.densepose_datas[i] for i, x in enumerate(item) if x > 0] + boxes_xyxy_abs = self.boxes_xyxy_abs[item] + return DensePoseList( + densepose_datas_rel, boxes_xyxy_abs, self.image_size_hw, self.device + ) + else: + densepose_datas_rel = 
[self.densepose_datas[i] for i in item] + boxes_xyxy_abs = self.boxes_xyxy_abs[item] + return DensePoseList( + densepose_datas_rel, boxes_xyxy_abs, self.image_size_hw, self.device + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/densepose_coco_evaluation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/densepose_coco_evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..489e7b006da436531e37ebeb1f01f13bad60874d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/densepose_coco_evaluation.py @@ -0,0 +1,1138 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# This is a modified version of cocoeval.py where we also have the densepose evaluation. + +__author__ = "tsungyi" + +import copy +import datetime +import itertools +import logging +import numpy as np +import pickle +import time +from collections import defaultdict +from enum import Enum +from typing import Any, Dict, Tuple +import scipy.spatial.distance as ssd +from fvcore.common.file_io import PathManager +from pycocotools import mask as maskUtils +from scipy.io import loadmat +from scipy.ndimage import zoom as spzoom + +from .data.structures import DensePoseDataRelative, DensePoseResult + +logger = logging.getLogger(__name__) + + +class DensePoseEvalMode(str, Enum): + # use both masks and geodesic distances (GPS * IOU) to compute scores + GPSM = "gpsm" + # use only geodesic distances (GPS) to compute scores + GPS = "gps" + # use only masks (IOU) to compute scores + IOU = "iou" + + +class DensePoseDataMode(str, Enum): + # use estimated IUV data (default mode) + IUV_DT = "iuvdt" + # use ground truth IUV data + IUV_GT = "iuvgt" + # use ground truth labels I and set UV to 0 + I_GT_UV_0 = "igtuv0" + # use ground truth labels I and estimated UV coordinates + I_GT_UV_DT = "igtuvdt" + # use estimated labels I and set UV to 0 + I_DT_UV_0 = "idtuv0" + + +class DensePoseCocoEval(object): + # Interface for evaluating detection on the Microsoft COCO dataset. + # + # The usage for CocoEval is as follows: + # cocoGt=..., cocoDt=... # load dataset and results + # E = CocoEval(cocoGt,cocoDt); # initialize CocoEval object + # E.params.recThrs = ...; # set parameters as desired + # E.evaluate(); # run per image evaluation + # E.accumulate(); # accumulate per image results + # E.summarize(); # display summary metrics of results + # For example usage see evalDemo.m and http://mscoco.org/. + # + # The evaluation parameters are as follows (defaults in brackets): + # imgIds - [all] N demo ids to use for evaluation + # catIds - [all] K cat ids to use for evaluation + # iouThrs - [.5:.05:.95] T=10 IoU thresholds for evaluation + # recThrs - [0:.01:1] R=101 recall thresholds for evaluation + # areaRng - [...] A=4 object area ranges for evaluation + # maxDets - [1 10 100] M=3 thresholds on max detections per image + # iouType - ['segm'] set iouType to 'segm', 'bbox', 'keypoints' or 'densepose' + # iouType replaced the now DEPRECATED useSegm parameter. + # useCats - [1] if true use category labels for evaluation + # Note: if useCats=0 category labels are ignored as in proposal scoring. + # Note: multiple areaRngs [Ax2] and maxDets [Mx1] can be specified. 
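+    # A hedged usage sketch in Python for this DensePose variant (the
+    # annotation/result file names below are placeholders, not files that ship
+    # with this project):
+    #   from pycocotools.coco import COCO
+    #   cocoGt = COCO("densepose_annotations.json")
+    #   cocoDt = cocoGt.loadRes("densepose_results.json")
+    #   E = DensePoseCocoEval(cocoGt, cocoDt, iouType="densepose",
+    #                         dpEvalMode=DensePoseEvalMode.GPS)
+    #   E.evaluate(); E.accumulate(); E.summarize()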
+ # + # evaluate(): evaluates detections on every image and every category and + # concats the results into the "evalImgs" with fields: + # dtIds - [1xD] id for each of the D detections (dt) + # gtIds - [1xG] id for each of the G ground truths (gt) + # dtMatches - [TxD] matching gt id at each IoU or 0 + # gtMatches - [TxG] matching dt id at each IoU or 0 + # dtScores - [1xD] confidence of each dt + # gtIgnore - [1xG] ignore flag for each gt + # dtIgnore - [TxD] ignore flag for each dt at each IoU + # + # accumulate(): accumulates the per-image, per-category evaluation + # results in "evalImgs" into the dictionary "eval" with fields: + # params - parameters used for evaluation + # date - date evaluation was performed + # counts - [T,R,K,A,M] parameter dimensions (see above) + # precision - [TxRxKxAxM] precision for every evaluation setting + # recall - [TxKxAxM] max recall for every evaluation setting + # Note: precision and recall==-1 for settings with no gt objects. + # + # See also coco, mask, pycocoDemo, pycocoEvalDemo + # + # Microsoft COCO Toolbox. version 2.0 + # Data, paper, and tutorials available at: http://mscoco.org/ + # Code written by Piotr Dollar and Tsung-Yi Lin, 2015. + # Licensed under the Simplified BSD License [see coco/license.txt] + def __init__( + self, + cocoGt=None, + cocoDt=None, + iouType: str = "densepose", + dpEvalMode: DensePoseEvalMode = DensePoseEvalMode.GPS, + dpDataMode: DensePoseDataMode = DensePoseDataMode.IUV_DT, + ): + """ + Initialize CocoEval using coco APIs for gt and dt + :param cocoGt: coco object with ground truth annotations + :param cocoDt: coco object with detection results + :return: None + """ + self.cocoGt = cocoGt # ground truth COCO API + self.cocoDt = cocoDt # detections COCO API + self._dpEvalMode = dpEvalMode + self._dpDataMode = dpDataMode + self.params = {} # evaluation parameters + self.evalImgs = defaultdict(list) # per-image per-category eval results [KxAxI] + self.eval = {} # accumulated evaluation results + self._gts = defaultdict(list) # gt for evaluation + self._dts = defaultdict(list) # dt for evaluation + self.params = Params(iouType=iouType) # parameters + self._paramsEval = {} # parameters for evaluation + self.stats = [] # result summarization + self.ious = {} # ious between all gts and dts + if cocoGt is not None: + self.params.imgIds = sorted(cocoGt.getImgIds()) + self.params.catIds = sorted(cocoGt.getCatIds()) + self.ignoreThrBB = 0.7 + self.ignoreThrUV = 0.9 + + def _loadGEval(self): + smpl_subdiv_fpath = PathManager.get_local_path( + "https://dl.fbaipublicfiles.com/densepose/data/SMPL_subdiv.mat" + ) + pdist_transform_fpath = PathManager.get_local_path( + "https://dl.fbaipublicfiles.com/densepose/data/SMPL_SUBDIV_TRANSFORM.mat" + ) + pdist_matrix_fpath = PathManager.get_local_path( + "https://dl.fbaipublicfiles.com/densepose/data/Pdist_matrix.pkl", timeout_sec=120 + ) + SMPL_subdiv = loadmat(smpl_subdiv_fpath) + self.PDIST_transform = loadmat(pdist_transform_fpath) + self.PDIST_transform = self.PDIST_transform["index"].squeeze() + UV = np.array([SMPL_subdiv["U_subdiv"], SMPL_subdiv["V_subdiv"]]).squeeze() + ClosestVertInds = np.arange(UV.shape[1]) + 1 + self.Part_UVs = [] + self.Part_ClosestVertInds = [] + for i in np.arange(24): + self.Part_UVs.append(UV[:, SMPL_subdiv["Part_ID_subdiv"].squeeze() == (i + 1)]) + self.Part_ClosestVertInds.append( + ClosestVertInds[SMPL_subdiv["Part_ID_subdiv"].squeeze() == (i + 1)] + ) + + with open(pdist_matrix_fpath, "rb") as hFile: + arrays = pickle.load(hFile, encoding="latin1") + 
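+        # Pdist_matrix holds geodesic distances between pairs of the 27554
+        # subdivided SMPL vertices in condensed (flattened upper-triangular)
+        # form; getDistances() below converts an (i, j) vertex pair into an
+        # index into this flat array.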
self.Pdist_matrix = arrays["Pdist_matrix"] + self.Part_ids = np.array(SMPL_subdiv["Part_ID_subdiv"].squeeze()) + # Mean geodesic distances for parts. + self.Mean_Distances = np.array([0, 0.351, 0.107, 0.126, 0.237, 0.173, 0.142, 0.128, 0.150]) + # Coarse Part labels. + self.CoarseParts = np.array( + [0, 1, 1, 2, 2, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8] + ) + + def _prepare(self): + """ + Prepare ._gts and ._dts for evaluation based on params + :return: None + """ + + def _toMask(anns, coco): + # modify ann['segmentation'] by reference + for ann in anns: + rle = coco.annToRLE(ann) + ann["segmentation"] = rle + + def _getIgnoreRegion(iid, coco): + img = coco.imgs[iid] + + if "ignore_regions_x" not in img.keys(): + return None + + if len(img["ignore_regions_x"]) == 0: + return None + + rgns_merged = [] + for region_x, region_y in zip(img["ignore_regions_x"], img["ignore_regions_y"]): + rgns = [iter(region_x), iter(region_y)] + rgns_merged.append([next(it) for it in itertools.cycle(rgns)]) + rles = maskUtils.frPyObjects(rgns_merged, img["height"], img["width"]) + rle = maskUtils.merge(rles) + return maskUtils.decode(rle) + + def _checkIgnore(dt, iregion): + if iregion is None: + return True + + bb = np.array(dt["bbox"]).astype(np.int) + x1, y1, x2, y2 = bb[0], bb[1], bb[0] + bb[2], bb[1] + bb[3] + x2 = min([x2, iregion.shape[1]]) + y2 = min([y2, iregion.shape[0]]) + + if bb[2] * bb[3] == 0: + return False + + crop_iregion = iregion[y1:y2, x1:x2] + + if crop_iregion.sum() == 0: + return True + + if "densepose" not in dt.keys(): # filtering boxes + return crop_iregion.sum() / bb[2] / bb[3] < self.ignoreThrBB + + # filtering UVs + ignoremask = np.require(crop_iregion, requirements=["F"]) + mask = self._extract_mask(dt) + uvmask = np.require(np.asarray(mask > 0), dtype=np.uint8, requirements=["F"]) + uvmask_ = maskUtils.encode(uvmask) + ignoremask_ = maskUtils.encode(ignoremask) + uviou = maskUtils.iou([uvmask_], [ignoremask_], [1])[0] + return uviou < self.ignoreThrUV + + p = self.params + + if p.useCats: + gts = self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + dts = self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + else: + gts = self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds)) + dts = self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds)) + + imns = self.cocoGt.loadImgs(p.imgIds) + self.size_mapping = {} + for im in imns: + self.size_mapping[im["id"]] = [im["height"], im["width"]] + + # if iouType == 'uv', add point gt annotations + if p.iouType == "densepose": + self._loadGEval() + + # convert ground truth to mask if iouType == 'segm' + if p.iouType == "segm": + _toMask(gts, self.cocoGt) + _toMask(dts, self.cocoDt) + + # set ignore flag + for gt in gts: + gt["ignore"] = gt["ignore"] if "ignore" in gt else 0 + gt["ignore"] = "iscrowd" in gt and gt["iscrowd"] + if p.iouType == "keypoints": + gt["ignore"] = (gt["num_keypoints"] == 0) or gt["ignore"] + if p.iouType == "densepose": + gt["ignore"] = ("dp_x" in gt) == 0 + + self._gts = defaultdict(list) # gt for evaluation + self._dts = defaultdict(list) # dt for evaluation + self._igrgns = defaultdict(list) + + for gt in gts: + iid = gt["image_id"] + if iid not in self._igrgns.keys(): + self._igrgns[iid] = _getIgnoreRegion(iid, self.cocoGt) + if _checkIgnore(gt, self._igrgns[iid]): + self._gts[iid, gt["category_id"]].append(gt) + for dt in dts: + iid = dt["image_id"] + if (iid not in self._igrgns) or _checkIgnore(dt, self._igrgns[iid]): + 
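+                # keep the detection only when it is not (mostly) covered by an
+                # ignore region of its image (see _checkIgnore above)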
self._dts[iid, dt["category_id"]].append(dt) + + self.evalImgs = defaultdict(list) # per-image per-category evaluation results + self.eval = {} # accumulated evaluation results + + def evaluate(self): + """ + Run per image evaluation on given images and store results (a list of dict) in self.evalImgs + :return: None + """ + tic = time.time() + logger.info("Running per image DensePose evaluation... {}".format(self.params.iouType)) + p = self.params + # add backward compatibility if useSegm is specified in params + if p.useSegm is not None: + p.iouType = "segm" if p.useSegm == 1 else "bbox" + logger.info("useSegm (deprecated) is not None. Running DensePose evaluation") + p.imgIds = list(np.unique(p.imgIds)) + if p.useCats: + p.catIds = list(np.unique(p.catIds)) + p.maxDets = sorted(p.maxDets) + self.params = p + + self._prepare() + # loop through images, area range, max detection number + catIds = p.catIds if p.useCats else [-1] + + if p.iouType in ["segm", "bbox"]: + computeIoU = self.computeIoU + elif p.iouType == "keypoints": + computeIoU = self.computeOks + elif p.iouType == "densepose": + computeIoU = self.computeOgps + if self._dpEvalMode == DensePoseEvalMode.GPSM: + self.real_ious = { + (imgId, catId): self.computeDPIoU(imgId, catId) + for imgId in p.imgIds + for catId in catIds + } + + self.ious = { + (imgId, catId): computeIoU(imgId, catId) for imgId in p.imgIds for catId in catIds + } + + evaluateImg = self.evaluateImg + maxDet = p.maxDets[-1] + self.evalImgs = [ + evaluateImg(imgId, catId, areaRng, maxDet) + for catId in catIds + for areaRng in p.areaRng + for imgId in p.imgIds + ] + self._paramsEval = copy.deepcopy(self.params) + toc = time.time() + logger.info("DensePose evaluation DONE (t={:0.2f}s).".format(toc - tic)) + + def getDensePoseMask(self, polys): + maskGen = np.zeros([256, 256]) + for i in range(1, 15): + if polys[i - 1]: + currentMask = maskUtils.decode(polys[i - 1]) + maskGen[currentMask > 0] = i + return maskGen + + def _generate_rlemask_on_image(self, mask, imgId, data): + bbox_xywh = np.array(data["bbox"]) + x, y, w, h = bbox_xywh + im_h, im_w = self.size_mapping[imgId] + im_mask = np.zeros((im_h, im_w), dtype=np.uint8) + if mask is not None: + x0 = max(int(x), 0) + x1 = min(int(x + w), im_w, int(x) + mask.shape[1]) + y0 = max(int(y), 0) + y1 = min(int(y + h), im_h, int(y) + mask.shape[0]) + y = int(y) + x = int(x) + im_mask[y0:y1, x0:x1] = mask[y0 - y : y1 - y, x0 - x : x1 - x] + im_mask = np.require(np.asarray(im_mask > 0), dtype=np.uint8, requirements=["F"]) + rle_mask = maskUtils.encode(np.array(im_mask[:, :, np.newaxis], order="F"))[0] + return rle_mask + + def computeDPIoU(self, imgId, catId): + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and len(dt) == 0: + return [] + inds = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in inds] + if len(dt) > p.maxDets[-1]: + dt = dt[0 : p.maxDets[-1]] + + gtmasks = [] + for g in gt: + if DensePoseDataRelative.S_KEY in g: + mask = self.getDensePoseMask(g[DensePoseDataRelative.S_KEY]) + _, _, w, h = g["bbox"] + scale_x = float(max(w, 1)) / mask.shape[1] + scale_y = float(max(h, 1)) / mask.shape[0] + mask = spzoom(mask, (scale_y, scale_x), order=1, prefilter=False) + mask = np.array(mask > 0.5, dtype=np.uint8) + rle_mask = self._generate_rlemask_on_image(mask, imgId, g) + elif "segmentation" in 
g: + segmentation = g["segmentation"] + if isinstance(segmentation, list) and segmentation: + # polygons + im_h, im_w = self.size_mapping[imgId] + rles = maskUtils.frPyObjects(segmentation, im_h, im_w) + rle_mask = maskUtils.merge(rles) + elif isinstance(segmentation, dict): + if isinstance(segmentation["counts"], list): + # uncompressed RLE + im_h, im_w = self.size_mapping[imgId] + rle_mask = maskUtils.frPyObjects(segmentation, im_h, im_w) + else: + # compressed RLE + rle_mask = segmentation + else: + rle_mask = self._generate_rlemask_on_image(None, imgId, g) + else: + rle_mask = self._generate_rlemask_on_image(None, imgId, g) + gtmasks.append(rle_mask) + + dtmasks = [] + for d in dt: + mask = self._extract_mask(d) + mask = np.require(np.asarray(mask > 0), dtype=np.uint8, requirements=["F"]) + rle_mask = self._generate_rlemask_on_image(mask, imgId, d) + dtmasks.append(rle_mask) + + # compute iou between each dt and gt region + iscrowd = [int(o["iscrowd"]) for o in gt] + iousDP = maskUtils.iou(dtmasks, gtmasks, iscrowd) + return iousDP + + def computeIoU(self, imgId, catId): + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and len(dt) == 0: + return [] + inds = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in inds] + if len(dt) > p.maxDets[-1]: + dt = dt[0 : p.maxDets[-1]] + + if p.iouType == "segm": + g = [g["segmentation"] for g in gt] + d = [d["segmentation"] for d in dt] + elif p.iouType == "bbox": + g = [g["bbox"] for g in gt] + d = [d["bbox"] for d in dt] + else: + raise Exception("unknown iouType for iou computation") + + # compute iou between each dt and gt region + iscrowd = [int(o["iscrowd"]) for o in gt] + ious = maskUtils.iou(d, g, iscrowd) + return ious + + def computeOks(self, imgId, catId): + p = self.params + # dimension here should be Nxm + gts = self._gts[imgId, catId] + dts = self._dts[imgId, catId] + inds = np.argsort([-d["score"] for d in dts], kind="mergesort") + dts = [dts[i] for i in inds] + if len(dts) > p.maxDets[-1]: + dts = dts[0 : p.maxDets[-1]] + # if len(gts) == 0 and len(dts) == 0: + if len(gts) == 0 or len(dts) == 0: + return [] + ious = np.zeros((len(dts), len(gts))) + sigmas = ( + np.array( + [ + 0.26, + 0.25, + 0.25, + 0.35, + 0.35, + 0.79, + 0.79, + 0.72, + 0.72, + 0.62, + 0.62, + 1.07, + 1.07, + 0.87, + 0.87, + 0.89, + 0.89, + ] + ) + / 10.0 + ) + vars = (sigmas * 2) ** 2 + k = len(sigmas) + # compute oks between each detection and ground truth object + for j, gt in enumerate(gts): + # create bounds for ignore regions(double the gt bbox) + g = np.array(gt["keypoints"]) + xg = g[0::3] + yg = g[1::3] + vg = g[2::3] + k1 = np.count_nonzero(vg > 0) + bb = gt["bbox"] + x0 = bb[0] - bb[2] + x1 = bb[0] + bb[2] * 2 + y0 = bb[1] - bb[3] + y1 = bb[1] + bb[3] * 2 + for i, dt in enumerate(dts): + d = np.array(dt["keypoints"]) + xd = d[0::3] + yd = d[1::3] + if k1 > 0: + # measure the per-keypoint distance if keypoints visible + dx = xd - xg + dy = yd - yg + else: + # measure minimum distance to keypoints in (x0,y0) & (x1,y1) + z = np.zeros(k) + dx = np.max((z, x0 - xd), axis=0) + np.max((z, xd - x1), axis=0) + dy = np.max((z, y0 - yd), axis=0) + np.max((z, yd - y1), axis=0) + e = (dx ** 2 + dy ** 2) / vars / (gt["area"] + np.spacing(1)) / 2 + if k1 > 0: + e = e[vg > 0] + ious[i, j] = np.sum(np.exp(-e)) / e.shape[0] + return ious + + def 
_extract_mask(self, dt: Dict[str, Any]) -> np.ndarray: + (densepose_shape, densepose_data_encoded), densepose_bbox_xywh = dt["densepose"] + densepose_data = DensePoseResult.decode_png_data(densepose_shape, densepose_data_encoded) + return densepose_data[0] + + def _extract_iuv( + self, densepose_data: np.ndarray, py: np.ndarray, px: np.ndarray, gt: Dict[str, Any] + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Extract arrays of I, U and V values at given points as numpy arrays + given the data mode stored in self._dpDataMode + """ + if self._dpDataMode == DensePoseDataMode.IUV_DT: + # estimated labels and UV (default) + ipoints = densepose_data[0, py, px] + upoints = densepose_data[1, py, px] / 255.0 # convert from uint8 by /255. + vpoints = densepose_data[2, py, px] / 255.0 + elif self._dpDataMode == DensePoseDataMode.IUV_GT: + # ground truth + ipoints = np.array(gt["dp_I"]) + upoints = np.array(gt["dp_U"]) + vpoints = np.array(gt["dp_V"]) + elif self._dpDataMode == DensePoseDataMode.I_GT_UV_0: + # ground truth labels, UV = 0 + ipoints = np.array(gt["dp_I"]) + upoints = upoints * 0.0 + vpoints = vpoints * 0.0 + elif self._dpDataMode == DensePoseDataMode.I_GT_UV_DT: + # ground truth labels, estimated UV + ipoints = np.array(gt["dp_I"]) + upoints = densepose_data[1, py, px] / 255.0 # convert from uint8 by /255. + vpoints = densepose_data[2, py, px] / 255.0 + elif self._dpDataMode == DensePoseDataMode.I_DT_UV_0: + # estimated labels, UV = 0 + ipoints = densepose_data[0, py, px] + upoints = upoints * 0.0 + vpoints = vpoints * 0.0 + else: + raise ValueError(f"Unknown data mode: {self._dpDataMode}") + return ipoints, upoints, vpoints + + def computeOgps(self, imgId, catId): + p = self.params + # dimension here should be Nxm + g = self._gts[imgId, catId] + d = self._dts[imgId, catId] + inds = np.argsort([-d_["score"] for d_ in d], kind="mergesort") + d = [d[i] for i in inds] + if len(d) > p.maxDets[-1]: + d = d[0 : p.maxDets[-1]] + # if len(gts) == 0 and len(dts) == 0: + if len(g) == 0 or len(d) == 0: + return [] + ious = np.zeros((len(d), len(g))) + # compute opgs between each detection and ground truth object + # sigma = self.sigma #0.255 # dist = 0.3m corresponds to ogps = 0.5 + # 1 # dist = 0.3m corresponds to ogps = 0.96 + # 1.45 # dist = 1.7m (person height) corresponds to ogps = 0.5) + for j, gt in enumerate(g): + if not gt["ignore"]: + g_ = gt["bbox"] + for i, dt in enumerate(d): + # + dy = int(dt["bbox"][3]) + dx = int(dt["bbox"][2]) + dp_x = np.array(gt["dp_x"]) * g_[2] / 255.0 + dp_y = np.array(gt["dp_y"]) * g_[3] / 255.0 + py = (dp_y + g_[1] - dt["bbox"][1]).astype(np.int) + px = (dp_x + g_[0] - dt["bbox"][0]).astype(np.int) + # + pts = np.zeros(len(px)) + pts[px >= dx] = -1 + pts[py >= dy] = -1 + pts[px < 0] = -1 + pts[py < 0] = -1 + if len(pts) < 1: + ogps = 0.0 + elif np.max(pts) == -1: + ogps = 0.0 + else: + px[pts == -1] = 0 + py[pts == -1] = 0 + (densepose_shape, densepose_data_encoded), densepose_bbox_xywh = dt[ + "densepose" + ] + densepose_data = DensePoseResult.decode_png_data( + densepose_shape, densepose_data_encoded + ) + assert densepose_data.shape[2] == dx, ( + "DensePoseData width {} should be equal to " + "detection bounding box width {}".format(densepose_data.shape[2], dx) + ) + assert densepose_data.shape[1] == dy, ( + "DensePoseData height {} should be equal to " + "detection bounding box height {}".format(densepose_data.shape[1], dy) + ) + ipoints, upoints, vpoints = self._extract_iuv(densepose_data, py, px, gt) + ipoints[pts == -1] = 0 + # Find closest 
vertices in subsampled mesh. + cVerts, cVertsGT = self.findAllClosestVerts(gt, upoints, vpoints, ipoints) + # Get pairwise geodesic distances between gt and estimated mesh points. + dist = self.getDistances(cVertsGT, cVerts) + # Compute the Ogps measure. + # Find the mean geodesic normalization distance for + # each GT point, based on which part it is on. + Current_Mean_Distances = self.Mean_Distances[ + self.CoarseParts[self.Part_ids[cVertsGT[cVertsGT > 0].astype(int) - 1]] + ] + # Compute gps + ogps_values = np.exp(-(dist ** 2) / (2 * (Current_Mean_Distances ** 2))) + # + if len(dist) > 0: + ogps = np.sum(ogps_values) / len(dist) + ious[i, j] = ogps + + gbb = [gt["bbox"] for gt in g] + dbb = [dt["bbox"] for dt in d] + + # compute iou between each dt and gt region + iscrowd = [int(o["iscrowd"]) for o in g] + ious_bb = maskUtils.iou(dbb, gbb, iscrowd) + return ious, ious_bb + + def evaluateImg(self, imgId, catId, aRng, maxDet): + """ + perform evaluation for single category and image + :return: dict (single image results) + """ + + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and len(dt) == 0: + return None + + for g in gt: + # g['_ignore'] = g['ignore'] + if g["ignore"] or (g["area"] < aRng[0] or g["area"] > aRng[1]): + g["_ignore"] = True + else: + g["_ignore"] = False + + # sort dt highest score first, sort gt ignore last + gtind = np.argsort([g["_ignore"] for g in gt], kind="mergesort") + gt = [gt[i] for i in gtind] + dtind = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in dtind[0:maxDet]] + iscrowd = [int(o["iscrowd"]) for o in gt] + # load computed ious + if p.iouType == "densepose": + # print('Checking the length', len(self.ious[imgId, catId])) + # if len(self.ious[imgId, catId]) == 0: + # print(self.ious[imgId, catId]) + ious = ( + self.ious[imgId, catId][0][:, gtind] + if len(self.ious[imgId, catId]) > 0 + else self.ious[imgId, catId] + ) + ioubs = ( + self.ious[imgId, catId][1][:, gtind] + if len(self.ious[imgId, catId]) > 0 + else self.ious[imgId, catId] + ) + if self._dpEvalMode == DensePoseEvalMode.GPSM: + iousM = ( + self.real_ious[imgId, catId][:, gtind] + if len(self.real_ious[imgId, catId]) > 0 + else self.real_ious[imgId, catId] + ) + else: + ious = ( + self.ious[imgId, catId][:, gtind] + if len(self.ious[imgId, catId]) > 0 + else self.ious[imgId, catId] + ) + + T = len(p.iouThrs) + G = len(gt) + D = len(dt) + gtm = np.zeros((T, G)) + dtm = np.zeros((T, D)) + gtIg = np.array([g["_ignore"] for g in gt]) + dtIg = np.zeros((T, D)) + if np.all(gtIg) and p.iouType == "densepose": + dtIg = np.logical_or(dtIg, True) + + if len(ious) > 0: # and not p.iouType == 'densepose': + for tind, t in enumerate(p.iouThrs): + for dind, d in enumerate(dt): + # information about best match so far (m=-1 -> unmatched) + iou = min([t, 1 - 1e-10]) + m = -1 + for gind, _g in enumerate(gt): + # if this gt already matched, and not a crowd, continue + if gtm[tind, gind] > 0 and not iscrowd[gind]: + continue + # if dt matched to reg gt, and on ignore gt, stop + if m > -1 and gtIg[m] == 0 and gtIg[gind] == 1: + break + if p.iouType == "densepose": + if self._dpEvalMode == DensePoseEvalMode.GPSM: + new_iou = np.sqrt(iousM[dind, gind] * ious[dind, gind]) + elif self._dpEvalMode == DensePoseEvalMode.IOU: + new_iou = iousM[dind, gind] + elif self._dpEvalMode == 
DensePoseEvalMode.GPS: + new_iou = ious[dind, gind] + else: + new_iou = ious[dind, gind] + if new_iou < iou: + continue + if new_iou == 0.0: + continue + # if match successful and best so far, store appropriately + iou = new_iou + m = gind + # if match made store id of match for both dt and gt + if m == -1: + continue + dtIg[tind, dind] = gtIg[m] + dtm[tind, dind] = gt[m]["id"] + gtm[tind, m] = d["id"] + + if p.iouType == "densepose": + if not len(ioubs) == 0: + for dind, d in enumerate(dt): + # information about best match so far (m=-1 -> unmatched) + if dtm[tind, dind] == 0: + ioub = 0.8 + m = -1 + for gind, _g in enumerate(gt): + # if this gt already matched, and not a crowd, continue + if gtm[tind, gind] > 0 and not iscrowd[gind]: + continue + # continue to next gt unless better match made + if ioubs[dind, gind] < ioub: + continue + # if match successful and best so far, store appropriately + ioub = ioubs[dind, gind] + m = gind + # if match made store id of match for both dt and gt + if m > -1: + dtIg[:, dind] = gtIg[m] + if gtIg[m]: + dtm[tind, dind] = gt[m]["id"] + gtm[tind, m] = d["id"] + # set unmatched detections outside of area range to ignore + a = np.array([d["area"] < aRng[0] or d["area"] > aRng[1] for d in dt]).reshape((1, len(dt))) + dtIg = np.logical_or(dtIg, np.logical_and(dtm == 0, np.repeat(a, T, 0))) + # store results for given image and category + # print('Done with the function', len(self.ious[imgId, catId])) + return { + "image_id": imgId, + "category_id": catId, + "aRng": aRng, + "maxDet": maxDet, + "dtIds": [d["id"] for d in dt], + "gtIds": [g["id"] for g in gt], + "dtMatches": dtm, + "gtMatches": gtm, + "dtScores": [d["score"] for d in dt], + "gtIgnore": gtIg, + "dtIgnore": dtIg, + } + + def accumulate(self, p=None): + """ + Accumulate per image evaluation results and store the result in self.eval + :param p: input params for evaluation + :return: None + """ + logger.info("Accumulating evaluation results...") + tic = time.time() + if not self.evalImgs: + logger.info("Please run evaluate() first") + # allows input customized parameters + if p is None: + p = self.params + p.catIds = p.catIds if p.useCats == 1 else [-1] + T = len(p.iouThrs) + R = len(p.recThrs) + K = len(p.catIds) if p.useCats else 1 + A = len(p.areaRng) + M = len(p.maxDets) + precision = -(np.ones((T, R, K, A, M))) # -1 for the precision of absent categories + recall = -(np.ones((T, K, A, M))) + + # create dictionary for future indexing + logger.info("Categories: {}".format(p.catIds)) + _pe = self._paramsEval + catIds = _pe.catIds if _pe.useCats else [-1] + setK = set(catIds) + setA = set(map(tuple, _pe.areaRng)) + setM = set(_pe.maxDets) + setI = set(_pe.imgIds) + # get inds to evaluate + k_list = [n for n, k in enumerate(p.catIds) if k in setK] + m_list = [m for n, m in enumerate(p.maxDets) if m in setM] + a_list = [n for n, a in enumerate(map(lambda x: tuple(x), p.areaRng)) if a in setA] + i_list = [n for n, i in enumerate(p.imgIds) if i in setI] + I0 = len(_pe.imgIds) + A0 = len(_pe.areaRng) + # retrieve E at each category, area range, and max number of detections + for k, k0 in enumerate(k_list): + Nk = k0 * A0 * I0 + for a, a0 in enumerate(a_list): + Na = a0 * I0 + for m, maxDet in enumerate(m_list): + E = [self.evalImgs[Nk + Na + i] for i in i_list] + E = [e for e in E if e is not None] + if len(E) == 0: + continue + dtScores = np.concatenate([e["dtScores"][0:maxDet] for e in E]) + + # different sorting method generates slightly different results. 
+ # mergesort is used to be consistent as Matlab implementation. + inds = np.argsort(-dtScores, kind="mergesort") + + dtm = np.concatenate([e["dtMatches"][:, 0:maxDet] for e in E], axis=1)[:, inds] + dtIg = np.concatenate([e["dtIgnore"][:, 0:maxDet] for e in E], axis=1)[:, inds] + gtIg = np.concatenate([e["gtIgnore"] for e in E]) + npig = np.count_nonzero(gtIg == 0) + if npig == 0: + continue + tps = np.logical_and(dtm, np.logical_not(dtIg)) + fps = np.logical_and(np.logical_not(dtm), np.logical_not(dtIg)) + tp_sum = np.cumsum(tps, axis=1).astype(dtype=np.float) + fp_sum = np.cumsum(fps, axis=1).astype(dtype=np.float) + for t, (tp, fp) in enumerate(zip(tp_sum, fp_sum)): + tp = np.array(tp) + fp = np.array(fp) + nd = len(tp) + rc = tp / npig + pr = tp / (fp + tp + np.spacing(1)) + q = np.zeros((R,)) + + if nd: + recall[t, k, a, m] = rc[-1] + else: + recall[t, k, a, m] = 0 + + # numpy is slow without cython optimization for accessing elements + # use python array gets significant speed improvement + pr = pr.tolist() + q = q.tolist() + + for i in range(nd - 1, 0, -1): + if pr[i] > pr[i - 1]: + pr[i - 1] = pr[i] + + inds = np.searchsorted(rc, p.recThrs, side="left") + try: + for ri, pi in enumerate(inds): + q[ri] = pr[pi] + except Exception: + pass + precision[t, :, k, a, m] = np.array(q) + logger.info( + "Final: max precision {}, min precision {}".format(np.max(precision), np.min(precision)) + ) + self.eval = { + "params": p, + "counts": [T, R, K, A, M], + "date": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "precision": precision, + "recall": recall, + } + toc = time.time() + logger.info("DONE (t={:0.2f}s).".format(toc - tic)) + + def summarize(self): + """ + Compute and display summary metrics for evaluation results. + Note this function can *only* be applied on the default parameter setting + """ + + def _summarize(ap=1, iouThr=None, areaRng="all", maxDets=100): + p = self.params + iStr = " {:<18} {} @[ {}={:<9} | area={:>6s} | maxDets={:>3d} ] = {:0.3f}" + titleStr = "Average Precision" if ap == 1 else "Average Recall" + typeStr = "(AP)" if ap == 1 else "(AR)" + measure = "IoU" + if self.params.iouType == "keypoints": + measure = "OKS" + elif self.params.iouType == "densepose": + measure = "OGPS" + iouStr = ( + "{:0.2f}:{:0.2f}".format(p.iouThrs[0], p.iouThrs[-1]) + if iouThr is None + else "{:0.2f}".format(iouThr) + ) + + aind = [i for i, aRng in enumerate(p.areaRngLbl) if aRng == areaRng] + mind = [i for i, mDet in enumerate(p.maxDets) if mDet == maxDets] + if ap == 1: + # dimension of precision: [TxRxKxAxM] + s = self.eval["precision"] + # IoU + if iouThr is not None: + t = np.where(np.abs(iouThr - p.iouThrs) < 0.001)[0] + s = s[t] + s = s[:, :, :, aind, mind] + else: + # dimension of recall: [TxKxAxM] + s = self.eval["recall"] + if iouThr is not None: + t = np.where(iouThr == p.iouThrs)[0] + s = s[t] + s = s[:, :, aind, mind] + if len(s[s > -1]) == 0: + mean_s = -1 + else: + mean_s = np.mean(s[s > -1]) + logger.info(iStr.format(titleStr, typeStr, measure, iouStr, areaRng, maxDets, mean_s)) + return mean_s + + def _summarizeDets(): + stats = np.zeros((12,)) + stats[0] = _summarize(1) + stats[1] = _summarize(1, iouThr=0.5, maxDets=self.params.maxDets[2]) + stats[2] = _summarize(1, iouThr=0.75, maxDets=self.params.maxDets[2]) + stats[3] = _summarize(1, areaRng="small", maxDets=self.params.maxDets[2]) + stats[4] = _summarize(1, areaRng="medium", maxDets=self.params.maxDets[2]) + stats[5] = _summarize(1, areaRng="large", maxDets=self.params.maxDets[2]) + stats[6] = 
_summarize(0, maxDets=self.params.maxDets[0]) + stats[7] = _summarize(0, maxDets=self.params.maxDets[1]) + stats[8] = _summarize(0, maxDets=self.params.maxDets[2]) + stats[9] = _summarize(0, areaRng="small", maxDets=self.params.maxDets[2]) + stats[10] = _summarize(0, areaRng="medium", maxDets=self.params.maxDets[2]) + stats[11] = _summarize(0, areaRng="large", maxDets=self.params.maxDets[2]) + return stats + + def _summarizeKps(): + stats = np.zeros((10,)) + stats[0] = _summarize(1, maxDets=20) + stats[1] = _summarize(1, maxDets=20, iouThr=0.5) + stats[2] = _summarize(1, maxDets=20, iouThr=0.75) + stats[3] = _summarize(1, maxDets=20, areaRng="medium") + stats[4] = _summarize(1, maxDets=20, areaRng="large") + stats[5] = _summarize(0, maxDets=20) + stats[6] = _summarize(0, maxDets=20, iouThr=0.5) + stats[7] = _summarize(0, maxDets=20, iouThr=0.75) + stats[8] = _summarize(0, maxDets=20, areaRng="medium") + stats[9] = _summarize(0, maxDets=20, areaRng="large") + return stats + + def _summarizeUvs(): + stats = np.zeros((10,)) + stats[0] = _summarize(1, maxDets=self.params.maxDets[0]) + stats[1] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.5) + stats[2] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.75) + stats[3] = _summarize(1, maxDets=self.params.maxDets[0], areaRng="medium") + stats[4] = _summarize(1, maxDets=self.params.maxDets[0], areaRng="large") + stats[5] = _summarize(0, maxDets=self.params.maxDets[0]) + stats[6] = _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.5) + stats[7] = _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.75) + stats[8] = _summarize(0, maxDets=self.params.maxDets[0], areaRng="medium") + stats[9] = _summarize(0, maxDets=self.params.maxDets[0], areaRng="large") + return stats + + def _summarizeUvsOld(): + stats = np.zeros((18,)) + stats[0] = _summarize(1, maxDets=self.params.maxDets[0]) + stats[1] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.5) + stats[2] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.55) + stats[3] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.60) + stats[4] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.65) + stats[5] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.70) + stats[6] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.75) + stats[7] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.80) + stats[8] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.85) + stats[9] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.90) + stats[10] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.95) + stats[11] = _summarize(1, maxDets=self.params.maxDets[0], areaRng="medium") + stats[12] = _summarize(1, maxDets=self.params.maxDets[0], areaRng="large") + stats[13] = _summarize(0, maxDets=self.params.maxDets[0]) + stats[14] = _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.5) + stats[15] = _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.75) + stats[16] = _summarize(0, maxDets=self.params.maxDets[0], areaRng="medium") + stats[17] = _summarize(0, maxDets=self.params.maxDets[0], areaRng="large") + return stats + + if not self.eval: + raise Exception("Please run accumulate() first") + iouType = self.params.iouType + if iouType in ["segm", "bbox"]: + summarize = _summarizeDets + elif iouType in ["keypoints"]: + summarize = _summarizeKps + elif iouType in ["densepose"]: + summarize = _summarizeUvs + self.stats = summarize() + + def __str__(self): + self.summarize() + + # ================ functions for dense pose 
============================== + def findAllClosestVerts(self, gt, U_points, V_points, Index_points): + # + I_gt = np.array(gt["dp_I"]) + U_gt = np.array(gt["dp_U"]) + V_gt = np.array(gt["dp_V"]) + # + # print(I_gt) + # + ClosestVerts = np.ones(Index_points.shape) * -1 + for i in np.arange(24): + # + if sum(Index_points == (i + 1)) > 0: + UVs = np.array( + [U_points[Index_points == (i + 1)], V_points[Index_points == (i + 1)]] + ) + Current_Part_UVs = self.Part_UVs[i] + Current_Part_ClosestVertInds = self.Part_ClosestVertInds[i] + D = ssd.cdist(Current_Part_UVs.transpose(), UVs.transpose()).squeeze() + ClosestVerts[Index_points == (i + 1)] = Current_Part_ClosestVertInds[ + np.argmin(D, axis=0) + ] + # + ClosestVertsGT = np.ones(Index_points.shape) * -1 + for i in np.arange(24): + if sum(I_gt == (i + 1)) > 0: + UVs = np.array([U_gt[I_gt == (i + 1)], V_gt[I_gt == (i + 1)]]) + Current_Part_UVs = self.Part_UVs[i] + Current_Part_ClosestVertInds = self.Part_ClosestVertInds[i] + D = ssd.cdist(Current_Part_UVs.transpose(), UVs.transpose()).squeeze() + ClosestVertsGT[I_gt == (i + 1)] = Current_Part_ClosestVertInds[np.argmin(D, axis=0)] + # + return ClosestVerts, ClosestVertsGT + + def getDistances(self, cVertsGT, cVerts): + + ClosestVertsTransformed = self.PDIST_transform[cVerts.astype(int) - 1] + ClosestVertsGTTransformed = self.PDIST_transform[cVertsGT.astype(int) - 1] + # + ClosestVertsTransformed[cVerts < 0] = 0 + ClosestVertsGTTransformed[cVertsGT < 0] = 0 + # + cVertsGT = ClosestVertsGTTransformed + cVerts = ClosestVertsTransformed + # + n = 27554 + dists = [] + for d in range(len(cVertsGT)): + if cVertsGT[d] > 0: + if cVerts[d] > 0: + i = cVertsGT[d] - 1 + j = cVerts[d] - 1 + if j == i: + dists.append(0) + elif j > i: + ccc = i + i = j + j = ccc + i = n - i - 1 + j = n - j - 1 + k = (n * (n - 1) / 2) - (n - i) * ((n - i) - 1) / 2 + j - i - 1 + k = (n * n - n) / 2 - k - 1 + dists.append(self.Pdist_matrix[int(k)][0]) + else: + i = n - i - 1 + j = n - j - 1 + k = (n * (n - 1) / 2) - (n - i) * ((n - i) - 1) / 2 + j - i - 1 + k = (n * n - n) / 2 - k - 1 + dists.append(self.Pdist_matrix[int(k)][0]) + else: + dists.append(np.inf) + return np.atleast_1d(np.array(dists).squeeze()) + + +class Params: + """ + Params for coco evaluation api + """ + + def setDetParams(self): + self.imgIds = [] + self.catIds = [] + # np.arange causes trouble. the data point on arange is slightly larger than the true value + self.iouThrs = np.linspace(0.5, 0.95, np.round((0.95 - 0.5) / 0.05) + 1, endpoint=True) + self.recThrs = np.linspace(0.0, 1.00, np.round((1.00 - 0.0) / 0.01) + 1, endpoint=True) + self.maxDets = [1, 10, 100] + self.areaRng = [ + [0 ** 2, 1e5 ** 2], + [0 ** 2, 32 ** 2], + [32 ** 2, 96 ** 2], + [96 ** 2, 1e5 ** 2], + ] + self.areaRngLbl = ["all", "small", "medium", "large"] + self.useCats = 1 + + def setKpParams(self): + self.imgIds = [] + self.catIds = [] + # np.arange causes trouble. 
the data point on arange is slightly larger than the true value + self.iouThrs = np.linspace(0.5, 0.95, np.round((0.95 - 0.5) / 0.05) + 1, endpoint=True) + self.recThrs = np.linspace(0.0, 1.00, np.round((1.00 - 0.0) / 0.01) + 1, endpoint=True) + self.maxDets = [20] + self.areaRng = [[0 ** 2, 1e5 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]] + self.areaRngLbl = ["all", "medium", "large"] + self.useCats = 1 + + def setUvParams(self): + self.imgIds = [] + self.catIds = [] + self.iouThrs = np.linspace(0.5, 0.95, int(np.round((0.95 - 0.5) / 0.05)) + 1, endpoint=True) + self.recThrs = np.linspace(0.0, 1.00, int(np.round((1.00 - 0.0) / 0.01)) + 1, endpoint=True) + self.maxDets = [20] + self.areaRng = [[0 ** 2, 1e5 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]] + self.areaRngLbl = ["all", "medium", "large"] + self.useCats = 1 + + def __init__(self, iouType="segm"): + if iouType == "segm" or iouType == "bbox": + self.setDetParams() + elif iouType == "keypoints": + self.setKpParams() + elif iouType == "densepose": + self.setUvParams() + else: + raise Exception("iouType not supported") + self.iouType = iouType + # useSegm is deprecated + self.useSegm = None diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/densepose_head.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/densepose_head.py new file mode 100644 index 0000000000000000000000000000000000000000..363970681db36a41d5bc5b1960960a2a8bf23855 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/densepose_head.py @@ -0,0 +1,1216 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import math +from dataclasses import dataclass +from enum import Enum +import fvcore.nn.weight_init as weight_init +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.layers import Conv2d, ConvTranspose2d, interpolate +from detectron2.structures.boxes import matched_boxlist_iou +from detectron2.utils.registry import Registry + +from .data.structures import DensePoseOutput + +ROI_DENSEPOSE_HEAD_REGISTRY = Registry("ROI_DENSEPOSE_HEAD") + + +class DensePoseUVConfidenceType(Enum): + """ + Statistical model type for confidence learning, possible values: + - "iid_iso": statistically independent identically distributed residuals + with anisotropic covariance + - "indep_aniso": statistically independent residuals with anisotropic + covariances + For details, see: + N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning + Dense Correspondences from Noisy Labels", p. 918--926, in Proc. 
NIPS 2019 + """ + + # fmt: off + IID_ISO = "iid_iso" + INDEP_ANISO = "indep_aniso" + # fmt: on + + +@dataclass +class DensePoseUVConfidenceConfig: + """ + Configuration options for confidence on UV data + """ + + enabled: bool = False + # lower bound on UV confidences + epsilon: float = 0.01 + type: DensePoseUVConfidenceType = DensePoseUVConfidenceType.IID_ISO + + +@dataclass +class DensePoseConfidenceModelConfig: + """ + Configuration options for confidence models + """ + + # confidence for U and V values + uv_confidence: DensePoseUVConfidenceConfig + + @staticmethod + def from_cfg(cfg: CfgNode) -> "DensePoseConfidenceModelConfig": + return DensePoseConfidenceModelConfig( + uv_confidence=DensePoseUVConfidenceConfig( + enabled=cfg.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.ENABLED, + epsilon=cfg.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.EPSILON, + type=DensePoseUVConfidenceType(cfg.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.TYPE), + ) + ) + + +def initialize_module_params(module): + for name, param in module.named_parameters(): + if "bias" in name: + nn.init.constant_(param, 0) + elif "weight" in name: + nn.init.kaiming_normal_(param, mode="fan_out", nonlinearity="relu") + + +@ROI_DENSEPOSE_HEAD_REGISTRY.register() +class DensePoseDeepLabHead(nn.Module): + def __init__(self, cfg, input_channels): + super(DensePoseDeepLabHead, self).__init__() + # fmt: off + hidden_dim = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_DIM + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_KERNEL + norm = cfg.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NORM + self.n_stacked_convs = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_STACKED_CONVS + self.use_nonlocal = cfg.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NONLOCAL_ON + # fmt: on + pad_size = kernel_size // 2 + n_channels = input_channels + + self.ASPP = ASPP(input_channels, [6, 12, 56], n_channels) # 6, 12, 56 + self.add_module("ASPP", self.ASPP) + + if self.use_nonlocal: + self.NLBlock = NONLocalBlock2D(input_channels, bn_layer=True) + self.add_module("NLBlock", self.NLBlock) + # weight_init.c2_msra_fill(self.ASPP) + + for i in range(self.n_stacked_convs): + norm_module = nn.GroupNorm(32, hidden_dim) if norm == "GN" else None + layer = Conv2d( + n_channels, + hidden_dim, + kernel_size, + stride=1, + padding=pad_size, + bias=not norm, + norm=norm_module, + ) + weight_init.c2_msra_fill(layer) + n_channels = hidden_dim + layer_name = self._get_layer_name(i) + self.add_module(layer_name, layer) + self.n_out_channels = hidden_dim + # initialize_module_params(self) + + def forward(self, features): + x0 = features + x = self.ASPP(x0) + if self.use_nonlocal: + x = self.NLBlock(x) + output = x + for i in range(self.n_stacked_convs): + layer_name = self._get_layer_name(i) + x = getattr(self, layer_name)(x) + x = F.relu(x) + output = x + return output + + def _get_layer_name(self, i): + layer_name = "body_conv_fcn{}".format(i + 1) + return layer_name + + +# Copied from +# https://github.com/pytorch/vision/blob/master/torchvision/models/segmentation/deeplabv3.py +# See https://arxiv.org/pdf/1706.05587.pdf for details +class ASPPConv(nn.Sequential): + def __init__(self, in_channels, out_channels, dilation): + modules = [ + nn.Conv2d( + in_channels, out_channels, 3, padding=dilation, dilation=dilation, bias=False + ), + nn.GroupNorm(32, out_channels), + nn.ReLU(), + ] + super(ASPPConv, self).__init__(*modules) + + +class ASPPPooling(nn.Sequential): + def __init__(self, in_channels, out_channels): + super(ASPPPooling, self).__init__( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(in_channels, out_channels, 1, bias=False), + 
nn.GroupNorm(32, out_channels), + nn.ReLU(), + ) + + def forward(self, x): + size = x.shape[-2:] + x = super(ASPPPooling, self).forward(x) + return F.interpolate(x, size=size, mode="bilinear", align_corners=False) + + +class ASPP(nn.Module): + def __init__(self, in_channels, atrous_rates, out_channels): + super(ASPP, self).__init__() + modules = [] + modules.append( + nn.Sequential( + nn.Conv2d(in_channels, out_channels, 1, bias=False), + nn.GroupNorm(32, out_channels), + nn.ReLU(), + ) + ) + + rate1, rate2, rate3 = tuple(atrous_rates) + modules.append(ASPPConv(in_channels, out_channels, rate1)) + modules.append(ASPPConv(in_channels, out_channels, rate2)) + modules.append(ASPPConv(in_channels, out_channels, rate3)) + modules.append(ASPPPooling(in_channels, out_channels)) + + self.convs = nn.ModuleList(modules) + + self.project = nn.Sequential( + nn.Conv2d(5 * out_channels, out_channels, 1, bias=False), + # nn.BatchNorm2d(out_channels), + nn.ReLU() + # nn.Dropout(0.5) + ) + + def forward(self, x): + res = [] + for conv in self.convs: + res.append(conv(x)) + res = torch.cat(res, dim=1) + return self.project(res) + + +# copied from +# https://github.com/AlexHex7/Non-local_pytorch/blob/master/lib/non_local_embedded_gaussian.py +# See https://arxiv.org/abs/1711.07971 for details +class _NonLocalBlockND(nn.Module): + def __init__( + self, in_channels, inter_channels=None, dimension=3, sub_sample=True, bn_layer=True + ): + super(_NonLocalBlockND, self).__init__() + + assert dimension in [1, 2, 3] + + self.dimension = dimension + self.sub_sample = sub_sample + + self.in_channels = in_channels + self.inter_channels = inter_channels + + if self.inter_channels is None: + self.inter_channels = in_channels // 2 + if self.inter_channels == 0: + self.inter_channels = 1 + + if dimension == 3: + conv_nd = nn.Conv3d + max_pool_layer = nn.MaxPool3d(kernel_size=(1, 2, 2)) + bn = nn.GroupNorm # (32, hidden_dim) #nn.BatchNorm3d + elif dimension == 2: + conv_nd = nn.Conv2d + max_pool_layer = nn.MaxPool2d(kernel_size=(2, 2)) + bn = nn.GroupNorm # (32, hidden_dim)nn.BatchNorm2d + else: + conv_nd = nn.Conv1d + max_pool_layer = nn.MaxPool1d(kernel_size=2) + bn = nn.GroupNorm # (32, hidden_dim)nn.BatchNorm1d + + self.g = conv_nd( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1, + stride=1, + padding=0, + ) + + if bn_layer: + self.W = nn.Sequential( + conv_nd( + in_channels=self.inter_channels, + out_channels=self.in_channels, + kernel_size=1, + stride=1, + padding=0, + ), + bn(32, self.in_channels), + ) + nn.init.constant_(self.W[1].weight, 0) + nn.init.constant_(self.W[1].bias, 0) + else: + self.W = conv_nd( + in_channels=self.inter_channels, + out_channels=self.in_channels, + kernel_size=1, + stride=1, + padding=0, + ) + nn.init.constant_(self.W.weight, 0) + nn.init.constant_(self.W.bias, 0) + + self.theta = conv_nd( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1, + stride=1, + padding=0, + ) + self.phi = conv_nd( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1, + stride=1, + padding=0, + ) + + if sub_sample: + self.g = nn.Sequential(self.g, max_pool_layer) + self.phi = nn.Sequential(self.phi, max_pool_layer) + + def forward(self, x): + """ + :param x: (b, c, t, h, w) + :return: + """ + + batch_size = x.size(0) + + g_x = self.g(x).view(batch_size, self.inter_channels, -1) + g_x = g_x.permute(0, 2, 1) + + theta_x = self.theta(x).view(batch_size, self.inter_channels, -1) + theta_x = theta_x.permute(0, 2, 1) + 
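+        # Embedded-Gaussian non-local attention: pairwise affinities
+        # f[b, i, j] = theta(x_i) . phi(x_j) are softmax-normalized over j and
+        # used to aggregate g(x); self.W projects back to in_channels and the
+        # input is added back as a residual.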
phi_x = self.phi(x).view(batch_size, self.inter_channels, -1) + f = torch.matmul(theta_x, phi_x) + f_div_C = F.softmax(f, dim=-1) + + y = torch.matmul(f_div_C, g_x) + y = y.permute(0, 2, 1).contiguous() + y = y.view(batch_size, self.inter_channels, *x.size()[2:]) + W_y = self.W(y) + z = W_y + x + + return z + + +class NONLocalBlock2D(_NonLocalBlockND): + def __init__(self, in_channels, inter_channels=None, sub_sample=True, bn_layer=True): + super(NONLocalBlock2D, self).__init__( + in_channels, + inter_channels=inter_channels, + dimension=2, + sub_sample=sub_sample, + bn_layer=bn_layer, + ) + + +@ROI_DENSEPOSE_HEAD_REGISTRY.register() +class DensePoseV1ConvXHead(nn.Module): + def __init__(self, cfg, input_channels): + super(DensePoseV1ConvXHead, self).__init__() + # fmt: off + hidden_dim = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_DIM + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_KERNEL + self.n_stacked_convs = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_STACKED_CONVS + # fmt: on + pad_size = kernel_size // 2 + n_channels = input_channels + for i in range(self.n_stacked_convs): + layer = Conv2d(n_channels, hidden_dim, kernel_size, stride=1, padding=pad_size) + layer_name = self._get_layer_name(i) + self.add_module(layer_name, layer) + n_channels = hidden_dim + self.n_out_channels = n_channels + initialize_module_params(self) + + def forward(self, features): + x = features + output = x + for i in range(self.n_stacked_convs): + layer_name = self._get_layer_name(i) + x = getattr(self, layer_name)(x) + x = F.relu(x) + output = x + return output + + def _get_layer_name(self, i): + layer_name = "body_conv_fcn{}".format(i + 1) + return layer_name + + +class DensePosePredictor(nn.Module): + def __init__(self, cfg, input_channels): + + super(DensePosePredictor, self).__init__() + dim_in = input_channels + n_segm_chan = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS + dim_out_patches = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_PATCHES + 1 + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL + self.ann_index_lowres = ConvTranspose2d( + dim_in, n_segm_chan, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.index_uv_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.u_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.v_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.scale_factor = cfg.MODEL.ROI_DENSEPOSE_HEAD.UP_SCALE + self.confidence_model_cfg = DensePoseConfidenceModelConfig.from_cfg(cfg) + self._initialize_confidence_estimation_layers(cfg, self.confidence_model_cfg, dim_in) + initialize_module_params(self) + + def forward(self, head_outputs): + ann_index_lowres = self.ann_index_lowres(head_outputs) + index_uv_lowres = self.index_uv_lowres(head_outputs) + u_lowres = self.u_lowres(head_outputs) + v_lowres = self.v_lowres(head_outputs) + + def interp2d(input): + return interpolate( + input, scale_factor=self.scale_factor, mode="bilinear", align_corners=False + ) + + ann_index = interp2d(ann_index_lowres) + index_uv = interp2d(index_uv_lowres) + u = interp2d(u_lowres) + v = interp2d(v_lowres) + ( + (sigma_1, sigma_2, kappa_u, kappa_v), + (sigma_1_lowres, sigma_2_lowres, kappa_u_lowres, kappa_v_lowres), + (ann_index, index_uv), + ) = self._forward_confidence_estimation_layers( + self.confidence_model_cfg, head_outputs, interp2d, ann_index, index_uv + ) + return ( + (ann_index, 
index_uv, u, v), + (ann_index_lowres, index_uv_lowres, u_lowres, v_lowres), + (sigma_1, sigma_2, kappa_u, kappa_v), + (sigma_1_lowres, sigma_2_lowres, kappa_u_lowres, kappa_v_lowres), + ) + + def _initialize_confidence_estimation_layers( + self, cfg: CfgNode, confidence_model_cfg: DensePoseConfidenceModelConfig, dim_in: int + ): + dim_out_patches = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_PATCHES + 1 + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL + if confidence_model_cfg.uv_confidence.enabled: + if confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.IID_ISO: + self.sigma_2_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + elif confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.INDEP_ANISO: + self.sigma_2_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.kappa_u_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.kappa_v_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + else: + raise ValueError( + f"Unknown confidence model type: {confidence_model_cfg.confidence_model_type}" + ) + + def _forward_confidence_estimation_layers( + self, confidence_model_cfg, head_outputs, interp2d, ann_index, index_uv + ): + sigma_1, sigma_2, kappa_u, kappa_v = None, None, None, None + sigma_1_lowres, sigma_2_lowres, kappa_u_lowres, kappa_v_lowres = None, None, None, None + if confidence_model_cfg.uv_confidence.enabled: + if confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.IID_ISO: + sigma_2_lowres = self.sigma_2_lowres(head_outputs) + sigma_2 = interp2d(sigma_2_lowres) + elif confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.INDEP_ANISO: + sigma_2_lowres = self.sigma_2_lowres(head_outputs) + kappa_u_lowres = self.kappa_u_lowres(head_outputs) + kappa_v_lowres = self.kappa_v_lowres(head_outputs) + sigma_2 = interp2d(sigma_2_lowres) + kappa_u = interp2d(kappa_u_lowres) + kappa_v = interp2d(kappa_v_lowres) + else: + raise ValueError( + f"Unknown confidence model type: {confidence_model_cfg.confidence_model_type}" + ) + return ( + (sigma_1, sigma_2, kappa_u, kappa_v), + (sigma_1_lowres, sigma_2_lowres, kappa_u_lowres, kappa_v_lowres), + (ann_index, index_uv), + ) + + +class DensePoseDataFilter(object): + def __init__(self, cfg): + self.iou_threshold = cfg.MODEL.ROI_DENSEPOSE_HEAD.FG_IOU_THRESHOLD + + @torch.no_grad() + def __call__(self, proposals_with_targets): + """ + Filters proposals with targets to keep only the ones relevant for + DensePose training + proposals: list(Instances), each element of the list corresponds to + various instances (proposals, GT for boxes and densepose) for one + image + """ + proposals_filtered = [] + for proposals_per_image in proposals_with_targets: + if not hasattr(proposals_per_image, "gt_densepose"): + continue + assert hasattr(proposals_per_image, "gt_boxes") + assert hasattr(proposals_per_image, "proposal_boxes") + gt_boxes = proposals_per_image.gt_boxes + est_boxes = proposals_per_image.proposal_boxes + # apply match threshold for densepose head + iou = matched_boxlist_iou(gt_boxes, est_boxes) + iou_select = iou > self.iou_threshold + proposals_per_image = proposals_per_image[iou_select] + assert len(proposals_per_image.gt_boxes) == len(proposals_per_image.proposal_boxes) + # filter out any target without densepose annotation + 
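        # e.g. if gt_densepose == [dp_0, None, dp_2], only proposals 0 and 2 are
+        # kept below, so gt boxes, proposal boxes and densepose annotations stay
+        # index-aligned +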
gt_densepose = proposals_per_image.gt_densepose + assert len(proposals_per_image.gt_boxes) == len(proposals_per_image.gt_densepose) + selected_indices = [ + i for i, dp_target in enumerate(gt_densepose) if dp_target is not None + ] + if len(selected_indices) != len(gt_densepose): + proposals_per_image = proposals_per_image[selected_indices] + assert len(proposals_per_image.gt_boxes) == len(proposals_per_image.proposal_boxes) + assert len(proposals_per_image.gt_boxes) == len(proposals_per_image.gt_densepose) + proposals_filtered.append(proposals_per_image) + return proposals_filtered + + +def build_densepose_head(cfg, input_channels): + head_name = cfg.MODEL.ROI_DENSEPOSE_HEAD.NAME + return ROI_DENSEPOSE_HEAD_REGISTRY.get(head_name)(cfg, input_channels) + + +def build_densepose_predictor(cfg, input_channels): + predictor = DensePosePredictor(cfg, input_channels) + return predictor + + +def build_densepose_data_filter(cfg): + dp_filter = DensePoseDataFilter(cfg) + return dp_filter + + +def densepose_inference(densepose_outputs, densepose_confidences, detections): + """ + Infer dense pose estimate based on outputs from the DensePose head + and detections. The estimate for each detection instance is stored in its + "pred_densepose" attribute. + + Args: + densepose_outputs (tuple(`torch.Tensor`)): iterable containing 4 elements: + - s (:obj: `torch.Tensor`): coarse segmentation tensor of size (N, A, H, W), + - i (:obj: `torch.Tensor`): fine segmentation tensor of size (N, C, H, W), + - u (:obj: `torch.Tensor`): U coordinates for each class of size (N, C, H, W), + - v (:obj: `torch.Tensor`): V coordinates for each class of size (N, C, H, W), + where N is the total number of detections in a batch, + A is the number of coarse segmentations labels + (e.g. 15 for coarse body parts + background), + C is the number of fine segmentation labels + (e.g. 25 for fine body parts + background), + W is the resolution along the X axis + H is the resolution along the Y axis + densepose_confidences (tuple(`torch.Tensor`)): iterable containing 4 elements: + - sigma_1 (:obj: `torch.Tensor`): global confidences for UV coordinates + of size (N, C, H, W) + - sigma_2 (:obj: `torch.Tensor`): individual confidences for UV coordinates + of size (N, C, H, W) + - kappa_u (:obj: `torch.Tensor`): first component of confidence direction + vector of size (N, C, H, W) + - kappa_v (:obj: `torch.Tensor`): second component of confidence direction + vector of size (N, C, H, W) + detections (list[Instances]): A list of N Instances, where N is the number of images + in the batch. Instances are modified by this method: "pred_densepose" attribute + is added to each instance, the attribute contains the corresponding + DensePoseOutput object. + """ + # DensePose outputs: segmentation, body part indices, U, V + s, index_uv, u, v = densepose_outputs + sigma_1, sigma_2, kappa_u, kappa_v = densepose_confidences + k = 0 + for detection in detections: + n_i = len(detection) + s_i = s[k : k + n_i] + index_uv_i = index_uv[k : k + n_i] + u_i = u[k : k + n_i] + v_i = v[k : k + n_i] + _local_vars = locals() + confidences = { + name: _local_vars[name] + for name in ("sigma_1", "sigma_2", "kappa_u", "kappa_v") + if _local_vars.get(name) is not None + } + densepose_output_i = DensePoseOutput(s_i, index_uv_i, u_i, v_i, confidences) + detection.pred_densepose = densepose_output_i + k += n_i + + +def _linear_interpolation_utilities(v_norm, v0_src, size_src, v0_dst, size_dst, size_z): + """ + Computes utility values for linear interpolation at points v. 
+ The points are given as normalized offsets in the source interval + (v0_src, v0_src + size_src), more precisely: + v = v0_src + v_norm * size_src / 256.0 + The computed utilities include lower points v_lo, upper points v_hi, + interpolation weights v_w and flags j_valid indicating whether the + points falls into the destination interval (v0_dst, v0_dst + size_dst). + + Args: + v_norm (:obj: `torch.Tensor`): tensor of size N containing + normalized point offsets + v0_src (:obj: `torch.Tensor`): tensor of size N containing + left bounds of source intervals for normalized points + size_src (:obj: `torch.Tensor`): tensor of size N containing + source interval sizes for normalized points + v0_dst (:obj: `torch.Tensor`): tensor of size N containing + left bounds of destination intervals + size_dst (:obj: `torch.Tensor`): tensor of size N containing + destination interval sizes + size_z (int): interval size for data to be interpolated + + Returns: + v_lo (:obj: `torch.Tensor`): int tensor of size N containing + indices of lower values used for interpolation, all values are + integers from [0, size_z - 1] + v_hi (:obj: `torch.Tensor`): int tensor of size N containing + indices of upper values used for interpolation, all values are + integers from [0, size_z - 1] + v_w (:obj: `torch.Tensor`): float tensor of size N containing + interpolation weights + j_valid (:obj: `torch.Tensor`): uint8 tensor of size N containing + 0 for points outside the estimation interval + (v0_est, v0_est + size_est) and 1 otherwise + """ + v = v0_src + v_norm * size_src / 256.0 + j_valid = (v - v0_dst >= 0) * (v - v0_dst < size_dst) + v_grid = (v - v0_dst) * size_z / size_dst + v_lo = v_grid.floor().long().clamp(min=0, max=size_z - 1) + v_hi = (v_lo + 1).clamp(max=size_z - 1) + v_grid = torch.min(v_hi.float(), v_grid) + v_w = v_grid - v_lo.float() + return v_lo, v_hi, v_w, j_valid + + +def _grid_sampling_utilities( + zh, zw, bbox_xywh_est, bbox_xywh_gt, index_gt, x_norm, y_norm, index_bbox +): + """ + Prepare tensors used in grid sampling. + + Args: + z_est (:obj: `torch.Tensor`): tensor of size (N,C,H,W) with estimated + values of Z to be extracted for the points X, Y and channel + indices I + bbox_xywh_est (:obj: `torch.Tensor`): tensor of size (N, 4) containing + estimated bounding boxes in format XYWH + bbox_xywh_gt (:obj: `torch.Tensor`): tensor of size (N, 4) containing + matched ground truth bounding boxes in format XYWH + index_gt (:obj: `torch.Tensor`): tensor of size K with point labels for + ground truth points + x_norm (:obj: `torch.Tensor`): tensor of size K with X normalized + coordinates of ground truth points. Image X coordinates can be + obtained as X = Xbbox + x_norm * Wbbox / 255 + y_norm (:obj: `torch.Tensor`): tensor of size K with Y normalized + coordinates of ground truth points. Image Y coordinates can be + obtained as Y = Ybbox + y_norm * Hbbox / 255 + index_bbox (:obj: `torch.Tensor`): tensor of size K with bounding box + indices for each ground truth point. 
The values are thus in + [0, N-1] + + Returns: + j_valid (:obj: `torch.Tensor`): uint8 tensor of size M containing + 0 for points to be discarded and 1 for points to be selected + y_lo (:obj: `torch.Tensor`): int tensor of indices of upper values + in z_est for each point + y_hi (:obj: `torch.Tensor`): int tensor of indices of lower values + in z_est for each point + x_lo (:obj: `torch.Tensor`): int tensor of indices of left values + in z_est for each point + x_hi (:obj: `torch.Tensor`): int tensor of indices of right values + in z_est for each point + w_ylo_xlo (:obj: `torch.Tensor`): float tensor of size M; + contains upper-left value weight for each point + w_ylo_xhi (:obj: `torch.Tensor`): float tensor of size M; + contains upper-right value weight for each point + w_yhi_xlo (:obj: `torch.Tensor`): float tensor of size M; + contains lower-left value weight for each point + w_yhi_xhi (:obj: `torch.Tensor`): float tensor of size M; + contains lower-right value weight for each point + """ + + x0_gt, y0_gt, w_gt, h_gt = bbox_xywh_gt[index_bbox].unbind(dim=1) + x0_est, y0_est, w_est, h_est = bbox_xywh_est[index_bbox].unbind(dim=1) + x_lo, x_hi, x_w, jx_valid = _linear_interpolation_utilities( + x_norm, x0_gt, w_gt, x0_est, w_est, zw + ) + y_lo, y_hi, y_w, jy_valid = _linear_interpolation_utilities( + y_norm, y0_gt, h_gt, y0_est, h_est, zh + ) + j_valid = jx_valid * jy_valid + + w_ylo_xlo = (1.0 - x_w) * (1.0 - y_w) + w_ylo_xhi = x_w * (1.0 - y_w) + w_yhi_xlo = (1.0 - x_w) * y_w + w_yhi_xhi = x_w * y_w + + return j_valid, y_lo, y_hi, x_lo, x_hi, w_ylo_xlo, w_ylo_xhi, w_yhi_xlo, w_yhi_xhi + + +def _extract_at_points_packed( + z_est, + index_bbox_valid, + slice_index_uv, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, + w_ylo_xhi, + w_yhi_xlo, + w_yhi_xhi, +): + """ + Extract ground truth values z_gt for valid point indices and estimated + values z_est using bilinear interpolation over top-left (y_lo, x_lo), + top-right (y_lo, x_hi), bottom-left (y_hi, x_lo) and bottom-right + (y_hi, x_hi) values in z_est with corresponding weights: + w_ylo_xlo, w_ylo_xhi, w_yhi_xlo and w_yhi_xhi. 
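+    In other words, the value sampled for each point is
+        z = w_ylo_xlo * z_est[..., y_lo, x_lo] + w_ylo_xhi * z_est[..., y_lo, x_hi]
+            + w_yhi_xlo * z_est[..., y_hi, x_lo] + w_yhi_xhi * z_est[..., y_hi, x_hi]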
+ Use slice_index_uv to slice dim=1 in z_est + """ + z_est_sampled = ( + z_est[index_bbox_valid, slice_index_uv, y_lo, x_lo] * w_ylo_xlo + + z_est[index_bbox_valid, slice_index_uv, y_lo, x_hi] * w_ylo_xhi + + z_est[index_bbox_valid, slice_index_uv, y_hi, x_lo] * w_yhi_xlo + + z_est[index_bbox_valid, slice_index_uv, y_hi, x_hi] * w_yhi_xhi + ) + return z_est_sampled + + +def _resample_data( + z, bbox_xywh_src, bbox_xywh_dst, wout, hout, mode="nearest", padding_mode="zeros" +): + """ + Args: + z (:obj: `torch.Tensor`): tensor of size (N,C,H,W) with data to be + resampled + bbox_xywh_src (:obj: `torch.Tensor`): tensor of size (N,4) containing + source bounding boxes in format XYWH + bbox_xywh_dst (:obj: `torch.Tensor`): tensor of size (N,4) containing + destination bounding boxes in format XYWH + Return: + zresampled (:obj: `torch.Tensor`): tensor of size (N, C, Hout, Wout) + with resampled values of z, where D is the discretization size + """ + n = bbox_xywh_src.size(0) + assert n == bbox_xywh_dst.size(0), ( + "The number of " + "source ROIs for resampling ({}) should be equal to the number " + "of destination ROIs ({})".format(bbox_xywh_src.size(0), bbox_xywh_dst.size(0)) + ) + x0src, y0src, wsrc, hsrc = bbox_xywh_src.unbind(dim=1) + x0dst, y0dst, wdst, hdst = bbox_xywh_dst.unbind(dim=1) + x0dst_norm = 2 * (x0dst - x0src) / wsrc - 1 + y0dst_norm = 2 * (y0dst - y0src) / hsrc - 1 + x1dst_norm = 2 * (x0dst + wdst - x0src) / wsrc - 1 + y1dst_norm = 2 * (y0dst + hdst - y0src) / hsrc - 1 + grid_w = torch.arange(wout, device=z.device, dtype=torch.float) / wout + grid_h = torch.arange(hout, device=z.device, dtype=torch.float) / hout + grid_w_expanded = grid_w[None, None, :].expand(n, hout, wout) + grid_h_expanded = grid_h[None, :, None].expand(n, hout, wout) + dx_expanded = (x1dst_norm - x0dst_norm)[:, None, None].expand(n, hout, wout) + dy_expanded = (y1dst_norm - y0dst_norm)[:, None, None].expand(n, hout, wout) + x0_expanded = x0dst_norm[:, None, None].expand(n, hout, wout) + y0_expanded = y0dst_norm[:, None, None].expand(n, hout, wout) + grid_x = grid_w_expanded * dx_expanded + x0_expanded + grid_y = grid_h_expanded * dy_expanded + y0_expanded + grid = torch.stack((grid_x, grid_y), dim=3) + # resample Z from (N, C, H, W) into (N, C, Hout, Wout) + zresampled = F.grid_sample(z, grid, mode=mode, padding_mode=padding_mode, align_corners=True) + return zresampled + + +def _extract_single_tensors_from_matches_one_image( + proposals_targets, bbox_with_dp_offset, bbox_global_offset +): + i_gt_all = [] + x_norm_all = [] + y_norm_all = [] + u_gt_all = [] + v_gt_all = [] + s_gt_all = [] + bbox_xywh_gt_all = [] + bbox_xywh_est_all = [] + # Ibbox_all == k should be true for all data that corresponds + # to bbox_xywh_gt[k] and bbox_xywh_est[k] + # index k here is global wrt images + i_bbox_all = [] + # at offset k (k is global) contains index of bounding box data + # within densepose output tensor + i_with_dp = [] + + boxes_xywh_est = proposals_targets.proposal_boxes.clone() + boxes_xywh_gt = proposals_targets.gt_boxes.clone() + n_i = len(boxes_xywh_est) + assert n_i == len(boxes_xywh_gt) + + if n_i: + boxes_xywh_est.tensor[:, 2] -= boxes_xywh_est.tensor[:, 0] + boxes_xywh_est.tensor[:, 3] -= boxes_xywh_est.tensor[:, 1] + boxes_xywh_gt.tensor[:, 2] -= boxes_xywh_gt.tensor[:, 0] + boxes_xywh_gt.tensor[:, 3] -= boxes_xywh_gt.tensor[:, 1] + if hasattr(proposals_targets, "gt_densepose"): + densepose_gt = proposals_targets.gt_densepose + for k, box_xywh_est, box_xywh_gt, dp_gt in zip( + range(n_i), 
boxes_xywh_est.tensor, boxes_xywh_gt.tensor, densepose_gt + ): + if (dp_gt is not None) and (len(dp_gt.x) > 0): + i_gt_all.append(dp_gt.i) + x_norm_all.append(dp_gt.x) + y_norm_all.append(dp_gt.y) + u_gt_all.append(dp_gt.u) + v_gt_all.append(dp_gt.v) + s_gt_all.append(dp_gt.segm.unsqueeze(0)) + bbox_xywh_gt_all.append(box_xywh_gt.view(-1, 4)) + bbox_xywh_est_all.append(box_xywh_est.view(-1, 4)) + i_bbox_k = torch.full_like(dp_gt.i, bbox_with_dp_offset + len(i_with_dp)) + i_bbox_all.append(i_bbox_k) + i_with_dp.append(bbox_global_offset + k) + return ( + i_gt_all, + x_norm_all, + y_norm_all, + u_gt_all, + v_gt_all, + s_gt_all, + bbox_xywh_gt_all, + bbox_xywh_est_all, + i_bbox_all, + i_with_dp, + ) + + +def _extract_single_tensors_from_matches(proposals_with_targets): + i_img = [] + i_gt_all = [] + x_norm_all = [] + y_norm_all = [] + u_gt_all = [] + v_gt_all = [] + s_gt_all = [] + bbox_xywh_gt_all = [] + bbox_xywh_est_all = [] + i_bbox_all = [] + i_with_dp_all = [] + n = 0 + for i, proposals_targets_per_image in enumerate(proposals_with_targets): + n_i = proposals_targets_per_image.proposal_boxes.tensor.size(0) + if not n_i: + continue + ( + i_gt_img, + x_norm_img, + y_norm_img, + u_gt_img, + v_gt_img, + s_gt_img, + bbox_xywh_gt_img, + bbox_xywh_est_img, + i_bbox_img, + i_with_dp_img, + ) = _extract_single_tensors_from_matches_one_image( # noqa + proposals_targets_per_image, len(i_with_dp_all), n + ) + i_gt_all.extend(i_gt_img) + x_norm_all.extend(x_norm_img) + y_norm_all.extend(y_norm_img) + u_gt_all.extend(u_gt_img) + v_gt_all.extend(v_gt_img) + s_gt_all.extend(s_gt_img) + bbox_xywh_gt_all.extend(bbox_xywh_gt_img) + bbox_xywh_est_all.extend(bbox_xywh_est_img) + i_bbox_all.extend(i_bbox_img) + i_with_dp_all.extend(i_with_dp_img) + i_img.extend([i] * len(i_with_dp_img)) + n += n_i + # concatenate all data into a single tensor + if (n > 0) and (len(i_with_dp_all) > 0): + i_gt = torch.cat(i_gt_all, 0).long() + x_norm = torch.cat(x_norm_all, 0) + y_norm = torch.cat(y_norm_all, 0) + u_gt = torch.cat(u_gt_all, 0) + v_gt = torch.cat(v_gt_all, 0) + s_gt = torch.cat(s_gt_all, 0) + bbox_xywh_gt = torch.cat(bbox_xywh_gt_all, 0) + bbox_xywh_est = torch.cat(bbox_xywh_est_all, 0) + i_bbox = torch.cat(i_bbox_all, 0).long() + else: + i_gt = None + x_norm = None + y_norm = None + u_gt = None + v_gt = None + s_gt = None + bbox_xywh_gt = None + bbox_xywh_est = None + i_bbox = None + return ( + i_img, + i_with_dp_all, + bbox_xywh_est, + bbox_xywh_gt, + i_gt, + x_norm, + y_norm, + u_gt, + v_gt, + s_gt, + i_bbox, + ) + + +class IIDIsotropicGaussianUVLoss(nn.Module): + """ + Loss for the case of iid residuals with isotropic covariance: + $Sigma_i = sigma_i^2 I$ + The loss (negative log likelihood) is then: + $1/2 sum_{i=1}^n (log(2 pi) + 2 log sigma_i^2 + ||delta_i||^2 / sigma_i^2)$, + where $delta_i=(u - u', v - v')$ is a 2D vector containing UV coordinates + difference between estimated and ground truth UV values + For details, see: + N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning + Dense Correspondences from Noisy Labels", p. 918--926, in Proc. 
NIPS 2019
+    """
+
+    def __init__(self, sigma_lower_bound: float):
+        super(IIDIsotropicGaussianUVLoss, self).__init__()
+        self.sigma_lower_bound = sigma_lower_bound
+        self.log2pi = math.log(2 * math.pi)
+
+    def forward(
+        self,
+        u: torch.Tensor,
+        v: torch.Tensor,
+        sigma_u: torch.Tensor,
+        target_u: torch.Tensor,
+        target_v: torch.Tensor,
+    ):
+        # compute $\sigma_i^2$
+        # use sigma_lower_bound to avoid degenerate solution for variance
+        # (sigma -> 0)
+        sigma2 = F.softplus(sigma_u) + self.sigma_lower_bound
+        # compute \|delta_i\|^2
+        delta_t_delta = (u - target_u) ** 2 + (v - target_v) ** 2
+        # the total loss from the formula above:
+        loss = 0.5 * (self.log2pi + 2 * torch.log(sigma2) + delta_t_delta / sigma2)
+        return loss.sum()
+
+
+class IndepAnisotropicGaussianUVLoss(nn.Module):
+    """
+    Loss for the case of independent residuals with anisotropic covariances:
+    $Sigma_i = sigma_i^2 I + r_i r_i^T$
+    The loss (negative log likelihood) is then:
+    $1/2 sum_{i=1}^n (log(2 pi)
+      + log sigma_i^2 (sigma_i^2 + ||r_i||^2)
+      + ||delta_i||^2 / sigma_i^2
+      - <delta_i, r_i>^2 / (sigma_i^2 * (sigma_i^2 + ||r_i||^2)))$,
+    where $delta_i=(u - u', v - v')$ is a 2D vector containing UV coordinates
+    difference between estimated and ground truth UV values
+    For details, see:
+    N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning
+    Dense Correspondences from Noisy Labels", p. 918--926, in Proc. NIPS 2019
+    """
+
+    def __init__(self, sigma_lower_bound: float):
+        super(IndepAnisotropicGaussianUVLoss, self).__init__()
+        self.sigma_lower_bound = sigma_lower_bound
+        self.log2pi = math.log(2 * math.pi)
+
+    def forward(
+        self,
+        u: torch.Tensor,
+        v: torch.Tensor,
+        sigma_u: torch.Tensor,
+        kappa_u_est: torch.Tensor,
+        kappa_v_est: torch.Tensor,
+        target_u: torch.Tensor,
+        target_v: torch.Tensor,
+    ):
+        # compute $\sigma_i^2$
+        sigma2 = F.softplus(sigma_u) + self.sigma_lower_bound
+        # compute \|r_i\|^2
+        r_sqnorm2 = kappa_u_est ** 2 + kappa_v_est ** 2
+        delta_u = u - target_u
+        delta_v = v - target_v
+        # compute \|delta_i\|^2
+        delta_sqnorm = delta_u ** 2 + delta_v ** 2
+        delta_u_r_u = delta_u * kappa_u_est
+        delta_v_r_v = delta_v * kappa_v_est
+        # compute the scalar product <delta_i, r_i>
+        delta_r = delta_u_r_u + delta_v_r_v
+        # compute squared scalar product <delta_i, r_i>^2
+        delta_r_sqnorm = delta_r ** 2
+        denom2 = sigma2 * (sigma2 + r_sqnorm2)
+        loss = 0.5 * (
+            self.log2pi + torch.log(denom2) + delta_sqnorm / sigma2 - delta_r_sqnorm / denom2
+        )
+        return loss.sum()
+
+
+class DensePoseLosses(object):
+    def __init__(self, cfg):
+        # fmt: off
+        self.heatmap_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE
+        self.w_points = cfg.MODEL.ROI_DENSEPOSE_HEAD.POINT_REGRESSION_WEIGHTS
+        self.w_part = cfg.MODEL.ROI_DENSEPOSE_HEAD.PART_WEIGHTS
+        self.w_segm = cfg.MODEL.ROI_DENSEPOSE_HEAD.INDEX_WEIGHTS
+        self.n_segm_chan = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS
+        # fmt: on
+        self.confidence_model_cfg = DensePoseConfidenceModelConfig.from_cfg(cfg)
+        if self.confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.IID_ISO:
+            self.uv_loss_with_confidences = IIDIsotropicGaussianUVLoss(
+                self.confidence_model_cfg.uv_confidence.epsilon
+            )
+        elif self.confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.INDEP_ANISO:
+            self.uv_loss_with_confidences = IndepAnisotropicGaussianUVLoss(
+                self.confidence_model_cfg.uv_confidence.epsilon
+            )
+
+    def __call__(self, proposals_with_gt, densepose_outputs, densepose_confidences):
+        losses = {}
+        # densepose outputs are computed for all images and all bounding boxes; +
# i.e. if a batch has 4 images with (3, 1, 2, 1) proposals respectively, + # the outputs will have size(0) == 3+1+2+1 == 7 + s, index_uv, u, v = densepose_outputs + sigma_1, sigma_2, kappa_u, kappa_v = densepose_confidences + conf_type = self.confidence_model_cfg.uv_confidence.type + assert u.size(2) == v.size(2) + assert u.size(3) == v.size(3) + assert u.size(2) == index_uv.size(2) + assert u.size(3) == index_uv.size(3) + + with torch.no_grad(): + ( + index_uv_img, + i_with_dp, + bbox_xywh_est, + bbox_xywh_gt, + index_gt_all, + x_norm, + y_norm, + u_gt_all, + v_gt_all, + s_gt, + index_bbox, + ) = _extract_single_tensors_from_matches( # noqa + proposals_with_gt + ) + n_batch = len(i_with_dp) + + # NOTE: we need to keep the same computation graph on all the GPUs to + # perform reduction properly. Hence even if we have no data on one + # of the GPUs, we still need to generate the computation graph. + # Add fake (zero) loss in the form Tensor.sum() * 0 + if not n_batch: + losses["loss_densepose_I"] = index_uv.sum() * 0 + losses["loss_densepose_S"] = s.sum() * 0 + if self.confidence_model_cfg.uv_confidence.enabled: + losses["loss_densepose_UV"] = (u.sum() + v.sum()) * 0 + if conf_type == DensePoseUVConfidenceType.IID_ISO: + losses["loss_densepose_UV"] += sigma_2.sum() * 0 + elif conf_type == DensePoseUVConfidenceType.INDEP_ANISO: + losses["loss_densepose_UV"] += ( + sigma_2.sum() + kappa_u.sum() + kappa_v.sum() + ) * 0 + else: + losses["loss_densepose_U"] = u.sum() * 0 + losses["loss_densepose_V"] = v.sum() * 0 + return losses + + zh = u.size(2) + zw = u.size(3) + + ( + j_valid, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, + w_ylo_xhi, + w_yhi_xlo, + w_yhi_xhi, + ) = _grid_sampling_utilities( # noqa + zh, zw, bbox_xywh_est, bbox_xywh_gt, index_gt_all, x_norm, y_norm, index_bbox + ) + + j_valid_fg = j_valid * (index_gt_all > 0) + + u_gt = u_gt_all[j_valid_fg] + u_est_all = _extract_at_points_packed( + u[i_with_dp], + index_bbox, + index_gt_all, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, + w_ylo_xhi, + w_yhi_xlo, + w_yhi_xhi, + ) + u_est = u_est_all[j_valid_fg] + + v_gt = v_gt_all[j_valid_fg] + v_est_all = _extract_at_points_packed( + v[i_with_dp], + index_bbox, + index_gt_all, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, + w_ylo_xhi, + w_yhi_xlo, + w_yhi_xhi, + ) + v_est = v_est_all[j_valid_fg] + + index_uv_gt = index_gt_all[j_valid] + index_uv_est_all = _extract_at_points_packed( + index_uv[i_with_dp], + index_bbox, + slice(None), + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo[:, None], + w_ylo_xhi[:, None], + w_yhi_xlo[:, None], + w_yhi_xhi[:, None], + ) + index_uv_est = index_uv_est_all[j_valid, :] + + if self.confidence_model_cfg.uv_confidence.enabled: + sigma_2_est_all = _extract_at_points_packed( + sigma_2[i_with_dp], + index_bbox, + index_gt_all, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, + w_ylo_xhi, + w_yhi_xlo, + w_yhi_xhi, + ) + sigma_2_est = sigma_2_est_all[j_valid_fg] + if conf_type in [DensePoseUVConfidenceType.INDEP_ANISO]: + kappa_u_est_all = _extract_at_points_packed( + kappa_u[i_with_dp], + index_bbox, + index_gt_all, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, + w_ylo_xhi, + w_yhi_xlo, + w_yhi_xhi, + ) + kappa_u_est = kappa_u_est_all[j_valid_fg] + kappa_v_est_all = _extract_at_points_packed( + kappa_v[i_with_dp], + index_bbox, + index_gt_all, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, + w_ylo_xhi, + w_yhi_xlo, + w_yhi_xhi, + ) + kappa_v_est = kappa_v_est_all[j_valid_fg] + + # Resample everything to the estimated data size, no need to resample + # S_est then: + s_est = 
s[i_with_dp] + with torch.no_grad(): + s_gt = _resample_data( + s_gt.unsqueeze(1), + bbox_xywh_gt, + bbox_xywh_est, + self.heatmap_size, + self.heatmap_size, + mode="nearest", + padding_mode="zeros", + ).squeeze(1) + + # add point-based losses: + if self.confidence_model_cfg.uv_confidence.enabled: + if conf_type == DensePoseUVConfidenceType.IID_ISO: + uv_loss = ( + self.uv_loss_with_confidences(u_est, v_est, sigma_2_est, u_gt, v_gt) + * self.w_points + ) + losses["loss_densepose_UV"] = uv_loss + elif conf_type == DensePoseUVConfidenceType.INDEP_ANISO: + uv_loss = ( + self.uv_loss_with_confidences( + u_est, v_est, sigma_2_est, kappa_u_est, kappa_v_est, u_gt, v_gt + ) + * self.w_points + ) + losses["loss_densepose_UV"] = uv_loss + else: + raise ValueError(f"Unknown confidence model type: {conf_type}") + else: + u_loss = F.smooth_l1_loss(u_est, u_gt, reduction="sum") * self.w_points + losses["loss_densepose_U"] = u_loss + v_loss = F.smooth_l1_loss(v_est, v_gt, reduction="sum") * self.w_points + losses["loss_densepose_V"] = v_loss + index_uv_loss = F.cross_entropy(index_uv_est, index_uv_gt.long()) * self.w_part + losses["loss_densepose_I"] = index_uv_loss + + if self.n_segm_chan == 2: + s_gt = s_gt > 0 + s_loss = F.cross_entropy(s_est, s_gt.long()) * self.w_segm + losses["loss_densepose_S"] = s_loss + return losses + + +def build_densepose_losses(cfg): + losses = DensePoseLosses(cfg) + return losses diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/evaluator.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/evaluator.py new file mode 100644 index 0000000000000000000000000000000000000000..3bb002b5093365f12edf5f4610ab261491d12bc8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/evaluator.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import contextlib +import copy +import io +import itertools +import json +import logging +import os +from collections import OrderedDict +import torch +from fvcore.common.file_io import PathManager +from pycocotools.coco import COCO + +from detectron2.data import MetadataCatalog +from detectron2.evaluation import DatasetEvaluator +from detectron2.structures import BoxMode +from detectron2.utils.comm import all_gather, is_main_process, synchronize +from detectron2.utils.logger import create_small_table + +from .densepose_coco_evaluation import DensePoseCocoEval, DensePoseEvalMode + + +class DensePoseCOCOEvaluator(DatasetEvaluator): + def __init__(self, dataset_name, distributed, output_dir=None): + self._distributed = distributed + self._output_dir = output_dir + + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + self._metadata = MetadataCatalog.get(dataset_name) + json_file = PathManager.get_local_path(self._metadata.json_file) + with contextlib.redirect_stdout(io.StringIO()): + self._coco_api = COCO(json_file) + + def reset(self): + self._predictions = [] + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a COCO model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a COCO model. It is a list of dicts with key + "instances" that contains :class:`Instances`. + The :class:`Instances` object needs to have `densepose` field. 
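+                Each instance is converted to XYWH_ABS boxes with its DensePose
+                output attached (via `to_result`) and serialized to COCO-style
+                json records by `prediction_to_json`.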
+ """ + for input, output in zip(inputs, outputs): + instances = output["instances"].to(self._cpu_device) + + boxes = instances.pred_boxes.tensor.clone() + boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + instances.pred_densepose = instances.pred_densepose.to_result(boxes) + + json_results = prediction_to_json(instances, input["image_id"]) + self._predictions.extend(json_results) + + def evaluate(self): + if self._distributed: + synchronize() + predictions = all_gather(self._predictions) + predictions = list(itertools.chain(*predictions)) + if not is_main_process(): + return + else: + predictions = self._predictions + + return copy.deepcopy(self._eval_predictions(predictions)) + + def _eval_predictions(self, predictions): + """ + Evaluate predictions on densepose. + Return results with the metrics of the tasks. + """ + self._logger.info("Preparing results for COCO format ...") + + if self._output_dir: + file_path = os.path.join(self._output_dir, "coco_densepose_results.json") + with open(file_path, "w") as f: + json.dump(predictions, f) + f.flush() + os.fsync(f.fileno()) + + self._logger.info("Evaluating predictions ...") + res = OrderedDict() + results_gps, results_gpsm = _evaluate_predictions_on_coco(self._coco_api, predictions) + res["densepose_gps"] = results_gps + res["densepose_gpsm"] = results_gpsm + return res + + +def prediction_to_json(instances, img_id): + """ + Args: + instances (Instances): the output of the model + img_id (str): the image id in COCO + + Returns: + list[dict]: the results in densepose evaluation format + """ + scores = instances.scores.tolist() + + results = [] + for k in range(len(instances)): + densepose = instances.pred_densepose[k] + result = { + "image_id": img_id, + "category_id": 1, # densepose only has one class + "bbox": densepose[1], + "score": scores[k], + "densepose": densepose, + } + results.append(result) + return results + + +def _evaluate_predictions_on_coco(coco_gt, coco_results): + metrics = ["AP", "AP50", "AP75", "APm", "APl"] + + logger = logging.getLogger(__name__) + + if len(coco_results) == 0: # cocoapi does not handle empty results very well + logger.warn("No predictions from the model! 
Set scores to -1") + results_gps = {metric: -1 for metric in metrics} + results_gpsm = {metric: -1 for metric in metrics} + return results_gps, results_gpsm + + coco_dt = coco_gt.loadRes(coco_results) + results_gps = _evaluate_predictions_on_coco_gps(coco_gt, coco_dt, metrics) + logger.info( + "Evaluation results for densepose, GPS metric: \n" + create_small_table(results_gps) + ) + results_gpsm = _evaluate_predictions_on_coco_gpsm(coco_gt, coco_dt, metrics) + logger.info( + "Evaluation results for densepose, GPSm metric: \n" + create_small_table(results_gpsm) + ) + return results_gps, results_gpsm + + +def _evaluate_predictions_on_coco_gps(coco_gt, coco_dt, metrics): + coco_eval = DensePoseCocoEval(coco_gt, coco_dt, "densepose", dpEvalMode=DensePoseEvalMode.GPS) + coco_eval.evaluate() + coco_eval.accumulate() + coco_eval.summarize() + results = {metric: float(coco_eval.stats[idx] * 100) for idx, metric in enumerate(metrics)} + return results + + +def _evaluate_predictions_on_coco_gpsm(coco_gt, coco_dt, metrics): + coco_eval = DensePoseCocoEval(coco_gt, coco_dt, "densepose", dpEvalMode=DensePoseEvalMode.GPSM) + coco_eval.evaluate() + coco_eval.accumulate() + coco_eval.summarize() + results = {metric: float(coco_eval.stats[idx] * 100) for idx, metric in enumerate(metrics)} + return results diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/modeling/test_time_augmentation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/modeling/test_time_augmentation.py new file mode 100644 index 0000000000000000000000000000000000000000..fcf69db1b6e4c687bc4e284e2795cab61ebf043f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/modeling/test_time_augmentation.py @@ -0,0 +1,75 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from detectron2.modeling.test_time_augmentation import GeneralizedRCNNWithTTA + + +class DensePoseGeneralizedRCNNWithTTA(GeneralizedRCNNWithTTA): + def __init__(self, cfg, model, transform_data, tta_mapper=None, batch_size=1): + """ + Args: + cfg (CfgNode): + model (GeneralizedRCNN): a GeneralizedRCNN to apply TTA on. + transform_data (DensePoseTransformData): contains symmetry label + transforms used for horizontal flip + tta_mapper (callable): takes a dataset dict and returns a list of + augmented versions of the dataset dict. Defaults to + `DatasetMapperTTA(cfg)`. + batch_size (int): batch the augmented images into this batch size for inference. 
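+            A minimal usage sketch (assuming a trained GeneralizedRCNN `model`
+            and a DensePose config `cfg`):
+                transform_data = load_from_cfg(cfg)  # densepose.utils.transform
+                tta_model = DensePoseGeneralizedRCNNWithTTA(cfg, model, transform_data)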
+ """ + self._transform_data = transform_data + super().__init__(cfg=cfg, model=model, tta_mapper=tta_mapper, batch_size=batch_size) + + # the implementation follows closely the one from detectron2/modeling + def _inference_one_image(self, input): + """ + Args: + input (dict): one dataset dict + + Returns: + dict: one output dict + """ + + augmented_inputs, aug_vars = self._get_augmented_inputs(input) + # Detect boxes from all augmented versions + with self._turn_off_roi_heads(["mask_on", "keypoint_on", "densepose_on"]): + # temporarily disable roi heads + all_boxes, all_scores, all_classes = self._get_augmented_boxes( + augmented_inputs, aug_vars + ) + merged_instances = self._merge_detections( + all_boxes, all_scores, all_classes, (aug_vars["height"], aug_vars["width"]) + ) + + if self.cfg.MODEL.MASK_ON or self.cfg.MODEL.DENSEPOSE_ON: + # Use the detected boxes to obtain new fields + augmented_instances = self._rescale_detected_boxes( + augmented_inputs, merged_instances, aug_vars + ) + # run forward on the detected boxes + outputs = self._batch_inference( + augmented_inputs, augmented_instances, do_postprocess=False + ) + # Delete now useless variables to avoid being out of memory + del augmented_inputs, augmented_instances, merged_instances + # average the predictions + if self.cfg.MODEL.MASK_ON: + outputs[0].pred_masks = self._reduce_pred_masks(outputs, aug_vars) + if self.cfg.MODEL.DENSEPOSE_ON: + outputs[0].pred_densepose = self._reduce_pred_densepose(outputs, aug_vars) + # postprocess + output = self._detector_postprocess(outputs[0], aug_vars) + return {"instances": output} + else: + return {"instances": merged_instances} + + def _reduce_pred_densepose(self, outputs, aug_vars): + for idx, output in enumerate(outputs): + if aug_vars["do_hflip"][idx]: + output.pred_densepose.hflip(self._transform_data) + # Less memory-intensive averaging + for attr in "SIUV": + setattr( + outputs[0].pred_densepose, + attr, + sum(getattr(o.pred_densepose, attr) for o in outputs) / len(outputs), + ) + return outputs[0].pred_densepose diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/roi_head.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..023119760b77cf5294ed18292e77e7f495099770 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/roi_head.py @@ -0,0 +1,213 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import numpy as np +from typing import Dict +import fvcore.nn.weight_init as weight_init +import torch +import torch.nn as nn +from torch.nn import functional as F + +from detectron2.layers import Conv2d, ShapeSpec, get_norm +from detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads +from detectron2.modeling.poolers import ROIPooler +from detectron2.modeling.roi_heads import select_foreground_proposals + +from .densepose_head import ( + build_densepose_data_filter, + build_densepose_head, + build_densepose_losses, + build_densepose_predictor, + densepose_inference, +) + + +class Decoder(nn.Module): + """ + A semantic segmentation head described in detail in the Panoptic Feature Pyramid Networks paper + (https://arxiv.org/abs/1901.02446). It takes FPN features as input and merges information from + all levels of the FPN into single output. 
+ """ + + def __init__(self, cfg, input_shape: Dict[str, ShapeSpec], in_features): + super(Decoder, self).__init__() + + # fmt: off + self.in_features = in_features + feature_strides = {k: v.stride for k, v in input_shape.items()} + feature_channels = {k: v.channels for k, v in input_shape.items()} + num_classes = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NUM_CLASSES + conv_dims = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_CONV_DIMS + self.common_stride = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_COMMON_STRIDE + norm = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NORM + # fmt: on + + self.scale_heads = [] + for in_feature in self.in_features: + head_ops = [] + head_length = max( + 1, int(np.log2(feature_strides[in_feature]) - np.log2(self.common_stride)) + ) + for k in range(head_length): + conv = Conv2d( + feature_channels[in_feature] if k == 0 else conv_dims, + conv_dims, + kernel_size=3, + stride=1, + padding=1, + bias=not norm, + norm=get_norm(norm, conv_dims), + activation=F.relu, + ) + weight_init.c2_msra_fill(conv) + head_ops.append(conv) + if feature_strides[in_feature] != self.common_stride: + head_ops.append( + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False) + ) + self.scale_heads.append(nn.Sequential(*head_ops)) + self.add_module(in_feature, self.scale_heads[-1]) + self.predictor = Conv2d(conv_dims, num_classes, kernel_size=1, stride=1, padding=0) + weight_init.c2_msra_fill(self.predictor) + + def forward(self, features): + for i, _ in enumerate(self.in_features): + if i == 0: + x = self.scale_heads[i](features[i]) + else: + x = x + self.scale_heads[i](features[i]) + x = self.predictor(x) + return x + + +@ROI_HEADS_REGISTRY.register() +class DensePoseROIHeads(StandardROIHeads): + """ + A Standard ROIHeads which contains an addition of DensePose head. + """ + + def __init__(self, cfg, input_shape): + super().__init__(cfg, input_shape) + self._init_densepose_head(cfg, input_shape) + + def _init_densepose_head(self, cfg, input_shape): + # fmt: off + self.densepose_on = cfg.MODEL.DENSEPOSE_ON + if not self.densepose_on: + return + self.densepose_data_filter = build_densepose_data_filter(cfg) + dp_pooler_resolution = cfg.MODEL.ROI_DENSEPOSE_HEAD.POOLER_RESOLUTION + dp_pooler_sampling_ratio = cfg.MODEL.ROI_DENSEPOSE_HEAD.POOLER_SAMPLING_RATIO + dp_pooler_type = cfg.MODEL.ROI_DENSEPOSE_HEAD.POOLER_TYPE + self.use_decoder = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_ON + # fmt: on + if self.use_decoder: + dp_pooler_scales = (1.0 / input_shape[self.in_features[0]].stride,) + else: + dp_pooler_scales = tuple(1.0 / input_shape[k].stride for k in self.in_features) + in_channels = [input_shape[f].channels for f in self.in_features][0] + + if self.use_decoder: + self.decoder = Decoder(cfg, input_shape, self.in_features) + + self.densepose_pooler = ROIPooler( + output_size=dp_pooler_resolution, + scales=dp_pooler_scales, + sampling_ratio=dp_pooler_sampling_ratio, + pooler_type=dp_pooler_type, + ) + self.densepose_head = build_densepose_head(cfg, in_channels) + self.densepose_predictor = build_densepose_predictor( + cfg, self.densepose_head.n_out_channels + ) + self.densepose_losses = build_densepose_losses(cfg) + + def _forward_densepose(self, features, instances): + """ + Forward logic of the densepose prediction branch. + + Args: + features (list[Tensor]): #level input features for densepose prediction + instances (list[Instances]): the per-image instances to train/predict densepose. + In training, they can be the proposals. + In inference, they can be the predicted boxes. 
+ + Returns: + In training, a dict of losses. + In inference, update `instances` with new fields "densepose" and return it. + """ + if not self.densepose_on: + return {} if self.training else instances + + features = [features[f] for f in self.in_features] + if self.training: + proposals, _ = select_foreground_proposals(instances, self.num_classes) + proposals_dp = self.densepose_data_filter(proposals) + if len(proposals_dp) > 0: + # NOTE may deadlock in DDP if certain workers have empty proposals_dp + proposal_boxes = [x.proposal_boxes for x in proposals_dp] + + if self.use_decoder: + features = [self.decoder(features)] + + features_dp = self.densepose_pooler(features, proposal_boxes) + densepose_head_outputs = self.densepose_head(features_dp) + densepose_outputs, _, confidences, _ = self.densepose_predictor( + densepose_head_outputs + ) + densepose_loss_dict = self.densepose_losses( + proposals_dp, densepose_outputs, confidences + ) + return densepose_loss_dict + else: + pred_boxes = [x.pred_boxes for x in instances] + + if self.use_decoder: + features = [self.decoder(features)] + + features_dp = self.densepose_pooler(features, pred_boxes) + if len(features_dp) > 0: + densepose_head_outputs = self.densepose_head(features_dp) + densepose_outputs, _, confidences, _ = self.densepose_predictor( + densepose_head_outputs + ) + else: + # If no detection occurred instances + # set densepose_outputs to empty tensors + empty_tensor = torch.zeros(size=(0, 0, 0, 0), device=features_dp.device) + densepose_outputs = tuple([empty_tensor] * 4) + confidences = tuple([empty_tensor] * 4) + + densepose_inference(densepose_outputs, confidences, instances) + return instances + + def forward(self, images, features, proposals, targets=None): + instances, losses = super().forward(images, features, proposals, targets) + del targets, images + + if self.training: + losses.update(self._forward_densepose(features, instances)) + return instances, losses + + def forward_with_given_boxes(self, features, instances): + """ + Use the given boxes in `instances` to produce other (non-box) per-ROI outputs. + + This is useful for downstream tasks where a box is known, but need to obtain + other attributes (outputs of other heads). + Test-time augmentation also uses this. + + Args: + features: same as in `forward()` + instances (list[Instances]): instances to predict other outputs. Expect the keys + "pred_boxes" and "pred_classes" to exist. + + Returns: + instances (list[Instances]): + the same `Instances` objects, with extra + fields such as `pred_masks` or `pred_keypoints`. + """ + + instances = super().forward_with_given_boxes(features, instances) + instances = self._forward_densepose(features, instances) + return instances diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/dbhelper.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/dbhelper.py new file mode 100644 index 0000000000000000000000000000000000000000..b28862cdede26c13200d928118d5bc5c00e3d2aa --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/dbhelper.py @@ -0,0 +1,145 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved
+from typing import Any, Dict, Optional, Tuple
+
+
+class EntrySelector(object):
+    """
+    Base class for entry selectors
+    """
+
+    @staticmethod
+    def from_string(spec: str) -> "EntrySelector":
+        if spec == "*":
+            return AllEntrySelector()
+        return FieldEntrySelector(spec)
+
+
+class AllEntrySelector(EntrySelector):
+    """
+    Selector that accepts all entries
+    """
+
+    SPECIFIER = "*"
+
+    def __call__(self, entry):
+        return True
+
+
+class FieldEntrySelector(EntrySelector):
+    """
+    Selector that accepts only entries that match provided field
+    specifier(s). Only a limited set of specifiers is supported for now:
+      <specifiers>::=<specifier>[<comma><specifiers>]
+      <specifier>::=<field_name>[<type_delim><type>]<equal><value_or_range>
+      <field_name> is a valid identifier
+      <type> ::= "int" | "str"
+      <equal> ::= "="
+      <comma> ::= ","
+      <type_delim> ::= ":"
+      <value_or_range> ::= <value> | <range>
+      <range> ::= <value><range_delim><value>
+      <range_delim> ::= "-"
+      <value> is a string without spaces and special symbols
+        (e.g. <comma>, <equal>, <type_delim>, <range_delim>)
+    """
+
+    _SPEC_DELIM = ","
+    _TYPE_DELIM = ":"
+    _RANGE_DELIM = "-"
+    _EQUAL = "="
+    _ERROR_PREFIX = "Invalid field selector specifier"
+
+    class _FieldEntryValuePredicate(object):
+        """
+        Predicate that checks strict equality for the specified entry field
+        """
+
+        def __init__(self, name: str, typespec: str, value: str):
+            import builtins
+
+            self.name = name
+            self.type = getattr(builtins, typespec) if typespec is not None else str
+            self.value = value
+
+        def __call__(self, entry):
+            return entry[self.name] == self.type(self.value)
+
+    class _FieldEntryRangePredicate(object):
+        """
+        Predicate that checks whether an entry field falls into the specified range
+        """
+
+        def __init__(self, name: str, typespec: str, vmin: str, vmax: str):
+            import builtins
+
+            self.name = name
+            self.type = getattr(builtins, typespec) if typespec is not None else str
+            self.vmin = vmin
+            self.vmax = vmax
+
+        def __call__(self, entry):
+            return (entry[self.name] >= self.type(self.vmin)) and (
+                entry[self.name] <= self.type(self.vmax)
+            )
+
+    def __init__(self, spec: str):
+        self._predicates = self._parse_specifier_into_predicates(spec)
+
+    def __call__(self, entry: Dict[str, Any]):
+        for predicate in self._predicates:
+            if not predicate(entry):
+                return False
+        return True
+
+    def _parse_specifier_into_predicates(self, spec: str):
+        predicates = []
+        specs = spec.split(self._SPEC_DELIM)
+        for subspec in specs:
+            eq_idx = subspec.find(self._EQUAL)
+            if eq_idx > 0:
+                field_name_with_type = subspec[:eq_idx]
+                field_name, field_type = self._parse_field_name_type(field_name_with_type)
+                field_value_or_range = subspec[eq_idx + 1 :]
+                if self._is_range_spec(field_value_or_range):
+                    vmin, vmax = self._get_range_spec(field_value_or_range)
+                    predicate = FieldEntrySelector._FieldEntryRangePredicate(
+                        field_name, field_type, vmin, vmax
+                    )
+                else:
+                    predicate = FieldEntrySelector._FieldEntryValuePredicate(
+                        field_name, field_type, field_value_or_range
+                    )
+                predicates.append(predicate)
+            elif eq_idx == 0:
+                self._parse_error(f'"{subspec}", field name is empty!')
+            else:
+                self._parse_error(f'"{subspec}", should have format ' "<field_name>=<value_or_range>!")
+        return predicates
+
+    def _parse_field_name_type(self, field_name_with_type: str) -> Tuple[str, Optional[str]]:
+        type_delim_idx = field_name_with_type.find(self._TYPE_DELIM)
+        if type_delim_idx > 0:
+            field_name = field_name_with_type[:type_delim_idx]
+            field_type = field_name_with_type[type_delim_idx + 1 :]
+        elif type_delim_idx == 0:
+            self._parse_error(f'"{field_name_with_type}", field name is empty!')
+        else:
+            field_name = field_name_with_type
+            field_type = None
+        return field_name, field_type
+
+    def _is_range_spec(self, field_value_or_range):
+        delim_idx = 
field_value_or_range.find(self._RANGE_DELIM) + return delim_idx > 0 + + def _get_range_spec(self, field_value_or_range): + if self._is_range_spec(field_value_or_range): + delim_idx = field_value_or_range.find(self._RANGE_DELIM) + vmin = field_value_or_range[:delim_idx] + vmax = field_value_or_range[delim_idx + 1 :] + return vmin, vmax + else: + self._parse_error('"field_value_or_range", range of values expected!') + + def _parse_error(self, msg): + raise ValueError(f"{self._ERROR_PREFIX}: {msg}") diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/logger.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..e3fa45e0c0218bdd2e79c08b0d8ff83abc3e4308 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/logger.py @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging + + +def verbosity_to_level(verbosity): + if verbosity is not None: + if verbosity == 0: + return logging.WARNING + elif verbosity == 1: + return logging.INFO + elif verbosity >= 2: + return logging.DEBUG + return logging.WARNING diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/transform.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/transform.py new file mode 100644 index 0000000000000000000000000000000000000000..b7cfe097234dbd3ff19b84ecdfb63fd8bf5fd4b6 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/utils/transform.py @@ -0,0 +1,16 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from fvcore.common.file_io import PathManager + +from detectron2.data import MetadataCatalog + +from densepose import DensePoseTransformData + + +def load_for_dataset(dataset_name): + path = MetadataCatalog.get(dataset_name).densepose_transform_src + densepose_transform_data_fpath = PathManager.get_local_path(path) + return DensePoseTransformData.load(densepose_transform_data_fpath) + + +def load_from_cfg(cfg): + return load_for_dataset(cfg.DATASETS.TEST[0]) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/base.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/base.py new file mode 100644 index 0000000000000000000000000000000000000000..2aa3e6e9f44ae2ce888f6e24dd11c8428734417b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/base.py @@ -0,0 +1,191 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import numpy as np +import cv2 +import torch + +Image = np.ndarray +Boxes = torch.Tensor + + +class MatrixVisualizer(object): + """ + Base visualizer for matrix data + """ + + def __init__( + self, + inplace=True, + cmap=cv2.COLORMAP_PARULA, + val_scale=1.0, + alpha=0.7, + interp_method_matrix=cv2.INTER_LINEAR, + interp_method_mask=cv2.INTER_NEAREST, + ): + self.inplace = inplace + self.cmap = cmap + self.val_scale = val_scale + self.alpha = alpha + self.interp_method_matrix = interp_method_matrix + self.interp_method_mask = interp_method_mask + + def visualize(self, image_bgr, mask, matrix, bbox_xywh): + self._check_image(image_bgr) + self._check_mask_matrix(mask, matrix) + if self.inplace: + image_target_bgr = image_bgr + else: + image_target_bgr = image_bgr * 0 + x, y, w, h = [int(v) for v in bbox_xywh] + if w <= 0 or h <= 0: + return image_bgr + mask, matrix = self._resize(mask, matrix, w, h) + mask_bg = np.tile((mask == 0)[:, :, np.newaxis], [1, 1, 3]) + matrix_scaled = matrix.astype(np.float32) * self.val_scale + _EPSILON = 1e-6 + if np.any(matrix_scaled > 255 + _EPSILON): + logger = logging.getLogger(__name__) + logger.warning( + f"Matrix has values > {255 + _EPSILON} after " f"scaling, clipping to [0..255]" + ) + matrix_scaled_8u = matrix_scaled.clip(0, 255).astype(np.uint8) + matrix_vis = cv2.applyColorMap(matrix_scaled_8u, self.cmap) + matrix_vis[mask_bg] = image_target_bgr[y : y + h, x : x + w, :][mask_bg] + image_target_bgr[y : y + h, x : x + w, :] = ( + image_target_bgr[y : y + h, x : x + w, :] * (1.0 - self.alpha) + matrix_vis * self.alpha + ) + return image_target_bgr.astype(np.uint8) + + def _resize(self, mask, matrix, w, h): + if (w != mask.shape[1]) or (h != mask.shape[0]): + mask = cv2.resize(mask, (w, h), self.interp_method_mask) + if (w != matrix.shape[1]) or (h != matrix.shape[0]): + matrix = cv2.resize(matrix, (w, h), self.interp_method_matrix) + return mask, matrix + + def _check_image(self, image_rgb): + assert len(image_rgb.shape) == 3 + assert image_rgb.shape[2] == 3 + assert image_rgb.dtype == np.uint8 + + def _check_mask_matrix(self, mask, matrix): + assert len(matrix.shape) == 2 + assert len(mask.shape) == 2 + assert mask.dtype == np.uint8 + + +class RectangleVisualizer(object): + + _COLOR_GREEN = (18, 127, 15) + + def __init__(self, color=_COLOR_GREEN, thickness=1): + self.color = color + self.thickness = thickness + + def visualize(self, image_bgr, bbox_xywh, color=None, thickness=None): + x, y, w, h = bbox_xywh + color = color or self.color + thickness = thickness or self.thickness + cv2.rectangle(image_bgr, (int(x), int(y)), (int(x + w), int(y + h)), color, thickness) + return image_bgr + + +class PointsVisualizer(object): + + _COLOR_GREEN = (18, 127, 15) + + def __init__(self, color_bgr=_COLOR_GREEN, r=5): + self.color_bgr = color_bgr + self.r = r + + def visualize(self, image_bgr, pts_xy, colors_bgr=None, rs=None): + for j, pt_xy in enumerate(pts_xy): + x, y = pt_xy + color_bgr = colors_bgr[j] if colors_bgr is not None else self.color_bgr + r = rs[j] if rs is not None else self.r + cv2.circle(image_bgr, (x, y), r, color_bgr, -1) + return image_bgr + + +class TextVisualizer(object): + + _COLOR_GRAY = (218, 227, 218) + _COLOR_WHITE = (255, 255, 255) + + def __init__( + self, + font_face=cv2.FONT_HERSHEY_SIMPLEX, + font_color_bgr=_COLOR_GRAY, + font_scale=0.35, + font_line_type=cv2.LINE_AA, + font_line_thickness=1, + fill_color_bgr=_COLOR_WHITE, + fill_color_transparency=1.0, + frame_color_bgr=_COLOR_WHITE, + 
frame_color_transparency=1.0, + frame_thickness=1, + ): + self.font_face = font_face + self.font_color_bgr = font_color_bgr + self.font_scale = font_scale + self.font_line_type = font_line_type + self.font_line_thickness = font_line_thickness + self.fill_color_bgr = fill_color_bgr + self.fill_color_transparency = fill_color_transparency + self.frame_color_bgr = frame_color_bgr + self.frame_color_transparency = frame_color_transparency + self.frame_thickness = frame_thickness + + def visualize(self, image_bgr, txt, topleft_xy): + txt_w, txt_h = self.get_text_size_wh(txt) + topleft_xy = tuple(map(int, topleft_xy)) + x, y = topleft_xy + if self.frame_color_transparency < 1.0: + t = self.frame_thickness + image_bgr[y - t : y + txt_h + t, x - t : x + txt_w + t, :] = ( + image_bgr[y - t : y + txt_h + t, x - t : x + txt_w + t, :] + * self.frame_color_transparency + + np.array(self.frame_color_bgr) * (1.0 - self.frame_color_transparency) + ).astype(np.float) + if self.fill_color_transparency < 1.0: + image_bgr[y : y + txt_h, x : x + txt_w, :] = ( + image_bgr[y : y + txt_h, x : x + txt_w, :] * self.fill_color_transparency + + np.array(self.fill_color_bgr) * (1.0 - self.fill_color_transparency) + ).astype(np.float) + cv2.putText( + image_bgr, + txt, + topleft_xy, + self.font_face, + self.font_scale, + self.font_color_bgr, + self.font_line_thickness, + self.font_line_type, + ) + return image_bgr + + def get_text_size_wh(self, txt): + ((txt_w, txt_h), _) = cv2.getTextSize( + txt, self.font_face, self.font_scale, self.font_line_thickness + ) + return txt_w, txt_h + + +class CompoundVisualizer(object): + def __init__(self, visualizers): + self.visualizers = visualizers + + def visualize(self, image_bgr, data): + assert len(data) == len( + self.visualizers + ), "The number of datas {} should match the number of visualizers" " {}".format( + len(data), len(self.visualizers) + ) + image = image_bgr + for i, visualizer in enumerate(self.visualizers): + image = visualizer.visualize(image, data[i]) + return image + + def __str__(self): + visualizer_str = ", ".join([str(v) for v in self.visualizers]) + return "Compound Visualizer [{}]".format(visualizer_str) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/bounding_box.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/bounding_box.py new file mode 100644 index 0000000000000000000000000000000000000000..d7951d69e4a92d638debc79458dd2cfe58c650e3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/bounding_box.py @@ -0,0 +1,37 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from .base import RectangleVisualizer, TextVisualizer + + +class BoundingBoxVisualizer(object): + def __init__(self): + self.rectangle_visualizer = RectangleVisualizer() + + def visualize(self, image_bgr, boxes_xywh): + for bbox_xywh in boxes_xywh: + image_bgr = self.rectangle_visualizer.visualize(image_bgr, bbox_xywh) + return image_bgr + + +class ScoredBoundingBoxVisualizer(object): + def __init__(self, bbox_visualizer_params=None, score_visualizer_params=None): + if bbox_visualizer_params is None: + bbox_visualizer_params = {} + if score_visualizer_params is None: + score_visualizer_params = {} + self.visualizer_bbox = RectangleVisualizer(**bbox_visualizer_params) + self.visualizer_score = TextVisualizer(**score_visualizer_params) + + def visualize(self, image_bgr, scored_bboxes): + boxes_xywh, box_scores = scored_bboxes + assert len(boxes_xywh) == len( + box_scores + ), "Number of bounding boxes {} should be equal to the number of scores {}".format( + len(boxes_xywh), len(box_scores) + ) + for i, box_xywh in enumerate(boxes_xywh): + score_i = box_scores[i] + image_bgr = self.visualizer_bbox.visualize(image_bgr, box_xywh) + score_txt = "{0:6.4f}".format(score_i) + topleft_xy = box_xywh[0], box_xywh[1] + image_bgr = self.visualizer_score.visualize(image_bgr, score_txt, topleft_xy) + return image_bgr diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/densepose.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/densepose.py new file mode 100644 index 0000000000000000000000000000000000000000..f2e77dc2d8e0f8c041ac1217978c639a826f0857 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/densepose.py @@ -0,0 +1,593 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import numpy as np +from typing import Iterable, Optional, Tuple +import cv2 + +from ..data.structures import DensePoseDataRelative, DensePoseOutput, DensePoseResult +from .base import Boxes, Image, MatrixVisualizer, PointsVisualizer + + +class DensePoseResultsVisualizer(object): + def visualize(self, image_bgr: Image, densepose_result: Optional[DensePoseResult]) -> Image: + if densepose_result is None: + return image_bgr + context = self.create_visualization_context(image_bgr) + for i, result_encoded_w_shape in enumerate(densepose_result.results): + iuv_arr = DensePoseResult.decode_png_data(*result_encoded_w_shape) + bbox_xywh = densepose_result.boxes_xywh[i] + self.visualize_iuv_arr(context, iuv_arr, bbox_xywh) + image_bgr = self.context_to_image_bgr(context) + return image_bgr + + +class DensePoseMaskedColormapResultsVisualizer(DensePoseResultsVisualizer): + def __init__( + self, + data_extractor, + segm_extractor, + inplace=True, + cmap=cv2.COLORMAP_PARULA, + alpha=0.7, + val_scale=1.0, + ): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, cmap=cmap, val_scale=val_scale, alpha=alpha + ) + self.data_extractor = data_extractor + self.segm_extractor = segm_extractor + + def create_visualization_context(self, image_bgr: Image): + return image_bgr + + def context_to_image_bgr(self, context): + return context + + def get_image_bgr_from_context(self, context): + return context + + def visualize_iuv_arr(self, context, iuv_arr, bbox_xywh): + image_bgr = self.get_image_bgr_from_context(context) + matrix = self.data_extractor(iuv_arr) + segm = self.segm_extractor(iuv_arr) + mask = np.zeros(matrix.shape, dtype=np.uint8) + mask[segm > 0] = 1 + image_bgr = self.mask_visualizer.visualize(image_bgr, mask, matrix, bbox_xywh) + return image_bgr + + +def _extract_i_from_iuvarr(iuv_arr): + return iuv_arr[0, :, :] + + +def _extract_u_from_iuvarr(iuv_arr): + return iuv_arr[1, :, :] + + +def _extract_v_from_iuvarr(iuv_arr): + return iuv_arr[2, :, :] + + +class DensePoseResultsMplContourVisualizer(DensePoseResultsVisualizer): + def __init__(self, levels=10, **kwargs): + self.levels = levels + self.plot_args = kwargs + + def create_visualization_context(self, image_bgr: Image): + import matplotlib.pyplot as plt + from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas + + context = {} + context["image_bgr"] = image_bgr + dpi = 100 + height_inches = float(image_bgr.shape[0]) / dpi + width_inches = float(image_bgr.shape[1]) / dpi + fig = plt.figure(figsize=(width_inches, height_inches), dpi=dpi) + plt.axes([0, 0, 1, 1]) + plt.axis("off") + context["fig"] = fig + canvas = FigureCanvas(fig) + context["canvas"] = canvas + extent = (0, image_bgr.shape[1], image_bgr.shape[0], 0) + plt.imshow(image_bgr[:, :, ::-1], extent=extent) + return context + + def context_to_image_bgr(self, context): + fig = context["fig"] + w, h = map(int, fig.get_size_inches() * fig.get_dpi()) + canvas = context["canvas"] + canvas.draw() + image_1d = np.fromstring(canvas.tostring_rgb(), dtype="uint8") + image_rgb = image_1d.reshape(h, w, 3) + image_bgr = image_rgb[:, :, ::-1].copy() + return image_bgr + + def visualize_iuv_arr(self, context, iuv_arr: np.ndarray, bbox_xywh: Boxes) -> Image: + import matplotlib.pyplot as plt + + u = _extract_u_from_iuvarr(iuv_arr).astype(float) / 255.0 + v = _extract_v_from_iuvarr(iuv_arr).astype(float) / 255.0 + extent = ( + bbox_xywh[0], + bbox_xywh[0] + bbox_xywh[2], + bbox_xywh[1], + bbox_xywh[1] + bbox_xywh[3], + ) + plt.contour(u, 
self.levels, extent=extent, **self.plot_args) + plt.contour(v, self.levels, extent=extent, **self.plot_args) + + +class DensePoseResultsCustomContourVisualizer(DensePoseResultsVisualizer): + """ + Contour visualization using marching squares + """ + + def __init__(self, levels=10, **kwargs): + # TODO: colormap is hardcoded + cmap = cv2.COLORMAP_PARULA + if isinstance(levels, int): + self.levels = np.linspace(0, 1, levels) + else: + self.levels = levels + if "linewidths" in kwargs: + self.linewidths = kwargs["linewidths"] + else: + self.linewidths = [1] * len(self.levels) + self.plot_args = kwargs + img_colors_bgr = cv2.applyColorMap((self.levels * 255).astype(np.uint8), cmap) + self.level_colors_bgr = [ + [int(v) for v in img_color_bgr.ravel()] for img_color_bgr in img_colors_bgr + ] + + def create_visualization_context(self, image_bgr: Image): + return image_bgr + + def context_to_image_bgr(self, context): + return context + + def get_image_bgr_from_context(self, context): + return context + + def visualize_iuv_arr(self, context, iuv_arr: np.ndarray, bbox_xywh: Boxes) -> Image: + image_bgr = self.get_image_bgr_from_context(context) + segm = _extract_i_from_iuvarr(iuv_arr) + u = _extract_u_from_iuvarr(iuv_arr).astype(float) / 255.0 + v = _extract_v_from_iuvarr(iuv_arr).astype(float) / 255.0 + self._contours(image_bgr, u, segm, bbox_xywh) + self._contours(image_bgr, v, segm, bbox_xywh) + + def _contours(self, image_bgr, arr, segm, bbox_xywh): + for part_idx in range(1, DensePoseDataRelative.N_PART_LABELS + 1): + mask = segm == part_idx + if not np.any(mask): + continue + arr_min = np.amin(arr[mask]) + arr_max = np.amax(arr[mask]) + I, J = np.nonzero(mask) + i0 = np.amin(I) + i1 = np.amax(I) + 1 + j0 = np.amin(J) + j1 = np.amax(J) + 1 + if (j1 == j0 + 1) or (i1 == i0 + 1): + continue + Nw = arr.shape[1] - 1 + Nh = arr.shape[0] - 1 + for level_idx, level in enumerate(self.levels): + if (level < arr_min) or (level > arr_max): + continue + vp = arr[i0:i1, j0:j1] >= level + bin_codes = vp[:-1, :-1] + vp[1:, :-1] * 2 + vp[1:, 1:] * 4 + vp[:-1, 1:] * 8 + mp = mask[i0:i1, j0:j1] + bin_mask_codes = mp[:-1, :-1] + mp[1:, :-1] * 2 + mp[1:, 1:] * 4 + mp[:-1, 1:] * 8 + it = np.nditer(bin_codes, flags=["multi_index"]) + color_bgr = self.level_colors_bgr[level_idx] + linewidth = self.linewidths[level_idx] + while not it.finished: + if (it[0] != 0) and (it[0] != 15): + i, j = it.multi_index + if bin_mask_codes[i, j] != 0: + self._draw_line( + image_bgr, + arr, + mask, + level, + color_bgr, + linewidth, + it[0], + it.multi_index, + bbox_xywh, + Nw, + Nh, + (i0, j0), + ) + it.iternext() + + def _draw_line( + self, + image_bgr, + arr, + mask, + v, + color_bgr, + linewidth, + bin_code, + multi_idx, + bbox_xywh, + Nw, + Nh, + offset, + ): + lines = self._bin_code_2_lines(arr, v, bin_code, multi_idx, Nw, Nh, offset) + x0, y0, w, h = bbox_xywh + x1 = x0 + w + y1 = y0 + h + for line in lines: + x0r, y0r = line[0] + x1r, y1r = line[1] + pt0 = (int(x0 + x0r * (x1 - x0)), int(y0 + y0r * (y1 - y0))) + pt1 = (int(x0 + x1r * (x1 - x0)), int(y0 + y1r * (y1 - y0))) + cv2.line(image_bgr, pt0, pt1, color_bgr, linewidth) + + def _bin_code_2_lines(self, arr, v, bin_code, multi_idx, Nw, Nh, offset): + i0, j0 = offset + i, j = multi_idx + i += i0 + j += j0 + v0, v1, v2, v3 = arr[i, j], arr[i + 1, j], arr[i + 1, j + 1], arr[i, j + 1] + x0i = float(j) / Nw + y0j = float(i) / Nh + He = 1.0 / Nh + We = 1.0 / Nw + if (bin_code == 1) or (bin_code == 14): + a = (v - v0) / (v1 - v0) + b = (v - v0) / (v3 - v0) + pt1 = (x0i, y0j + a * 
He) + pt2 = (x0i + b * We, y0j) + return [(pt1, pt2)] + elif (bin_code == 2) or (bin_code == 13): + a = (v - v0) / (v1 - v0) + b = (v - v1) / (v2 - v1) + pt1 = (x0i, y0j + a * He) + pt2 = (x0i + b * We, y0j + He) + return [(pt1, pt2)] + elif (bin_code == 3) or (bin_code == 12): + a = (v - v0) / (v3 - v0) + b = (v - v1) / (v2 - v1) + pt1 = (x0i + a * We, y0j) + pt2 = (x0i + b * We, y0j + He) + return [(pt1, pt2)] + elif (bin_code == 4) or (bin_code == 11): + a = (v - v1) / (v2 - v1) + b = (v - v3) / (v2 - v3) + pt1 = (x0i + a * We, y0j + He) + pt2 = (x0i + We, y0j + b * He) + return [(pt1, pt2)] + elif (bin_code == 6) or (bin_code == 9): + a = (v - v0) / (v1 - v0) + b = (v - v3) / (v2 - v3) + pt1 = (x0i, y0j + a * He) + pt2 = (x0i + We, y0j + b * He) + return [(pt1, pt2)] + elif (bin_code == 7) or (bin_code == 8): + a = (v - v0) / (v3 - v0) + b = (v - v3) / (v2 - v3) + pt1 = (x0i + a * We, y0j) + pt2 = (x0i + We, y0j + b * He) + return [(pt1, pt2)] + elif bin_code == 5: + a1 = (v - v0) / (v1 - v0) + b1 = (v - v1) / (v2 - v1) + pt11 = (x0i, y0j + a1 * He) + pt12 = (x0i + b1 * We, y0j + He) + a2 = (v - v0) / (v3 - v0) + b2 = (v - v3) / (v2 - v3) + pt21 = (x0i + a2 * We, y0j) + pt22 = (x0i + We, y0j + b2 * He) + return [(pt11, pt12), (pt21, pt22)] + elif bin_code == 10: + a1 = (v - v0) / (v3 - v0) + b1 = (v - v0) / (v1 - v0) + pt11 = (x0i + a1 * We, y0j) + pt12 = (x0i, y0j + b1 * He) + a2 = (v - v1) / (v2 - v1) + b2 = (v - v3) / (v2 - v3) + pt21 = (x0i + a2 * We, y0j + He) + pt22 = (x0i + We, y0j + b2 * He) + return [(pt11, pt12), (pt21, pt22)] + return [] + + +try: + import matplotlib + + matplotlib.use("Agg") + DensePoseResultsContourVisualizer = DensePoseResultsMplContourVisualizer +except ModuleNotFoundError: + logger = logging.getLogger(__name__) + logger.warning("Could not import matplotlib, using custom contour visualizer") + DensePoseResultsContourVisualizer = DensePoseResultsCustomContourVisualizer + + +class DensePoseResultsFineSegmentationVisualizer(DensePoseMaskedColormapResultsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7): + super(DensePoseResultsFineSegmentationVisualizer, self).__init__( + _extract_i_from_iuvarr, + _extract_i_from_iuvarr, + inplace, + cmap, + alpha, + val_scale=255.0 / DensePoseDataRelative.N_PART_LABELS, + ) + + +class DensePoseResultsUVisualizer(DensePoseMaskedColormapResultsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7): + super(DensePoseResultsUVisualizer, self).__init__( + _extract_u_from_iuvarr, _extract_i_from_iuvarr, inplace, cmap, alpha, val_scale=1.0 + ) + + +class DensePoseResultsVVisualizer(DensePoseMaskedColormapResultsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7): + super(DensePoseResultsVVisualizer, self).__init__( + _extract_v_from_iuvarr, _extract_i_from_iuvarr, inplace, cmap, alpha, val_scale=1.0 + ) + + +class DensePoseOutputsFineSegmentationVisualizer(object): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, + cmap=cmap, + val_scale=255.0 / DensePoseDataRelative.N_PART_LABELS, + alpha=alpha, + ) + + def visualize( + self, image_bgr: Image, dp_output_with_bboxes: Optional[Tuple[DensePoseOutput, Boxes]] + ) -> Image: + if dp_output_with_bboxes is None: + return image_bgr + densepose_output, bboxes_xywh = dp_output_with_bboxes + S = densepose_output.S + I = densepose_output.I # noqa + U = densepose_output.U + V = densepose_output.V + N = S.size(0) 
+ assert N == I.size( + 0 + ), "densepose outputs S {} and I {}" " should have equal first dim size".format( + S.size(), I.size() + ) + assert N == U.size( + 0 + ), "densepose outputs S {} and U {}" " should have equal first dim size".format( + S.size(), U.size() + ) + assert N == V.size( + 0 + ), "densepose outputs S {} and V {}" " should have equal first dim size".format( + S.size(), V.size() + ) + assert N == len( + bboxes_xywh + ), "number of bounding boxes {}" " should be equal to first dim size of outputs {}".format( + len(bboxes_xywh), N + ) + for n in range(N): + Sn = S[n].argmax(dim=0) + In = I[n].argmax(dim=0) * (Sn > 0).long() + matrix = In.cpu().numpy().astype(np.uint8) + mask = np.zeros(matrix.shape, dtype=np.uint8) + mask[matrix > 0] = 1 + bbox_xywh = bboxes_xywh[n] + image_bgr = self.mask_visualizer.visualize(image_bgr, mask, matrix, bbox_xywh) + return image_bgr + + +class DensePoseOutputsUVisualizer(object): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, cmap=cmap, val_scale=1.0, alpha=alpha + ) + + def visualize( + self, image_bgr: Image, dp_output_with_bboxes: Optional[Tuple[DensePoseOutput, Boxes]] + ) -> Image: + if dp_output_with_bboxes is None: + return image_bgr + densepose_output, bboxes_xywh = dp_output_with_bboxes + assert isinstance( + densepose_output, DensePoseOutput + ), "DensePoseOutput expected, {} encountered".format(type(densepose_output)) + S = densepose_output.S + I = densepose_output.I # noqa + U = densepose_output.U + V = densepose_output.V + N = S.size(0) + assert N == I.size( + 0 + ), "densepose outputs S {} and I {}" " should have equal first dim size".format( + S.size(), I.size() + ) + assert N == U.size( + 0 + ), "densepose outputs S {} and U {}" " should have equal first dim size".format( + S.size(), U.size() + ) + assert N == V.size( + 0 + ), "densepose outputs S {} and V {}" " should have equal first dim size".format( + S.size(), V.size() + ) + assert N == len( + bboxes_xywh + ), "number of bounding boxes {}" " should be equal to first dim size of outputs {}".format( + len(bboxes_xywh), N + ) + for n in range(N): + Sn = S[n].argmax(dim=0) + In = I[n].argmax(dim=0) * (Sn > 0).long() + segmentation = In.cpu().numpy().astype(np.uint8) + mask = np.zeros(segmentation.shape, dtype=np.uint8) + mask[segmentation > 0] = 1 + Un = U[n].cpu().numpy().astype(np.float32) + Uvis = np.zeros(segmentation.shape, dtype=np.float32) + for partId in range(Un.shape[0]): + Uvis[segmentation == partId] = Un[partId][segmentation == partId].clip(0, 1) * 255 + bbox_xywh = bboxes_xywh[n] + image_bgr = self.mask_visualizer.visualize(image_bgr, mask, Uvis, bbox_xywh) + return image_bgr + + +class DensePoseOutputsVVisualizer(object): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, cmap=cmap, val_scale=1.0, alpha=alpha + ) + + def visualize( + self, image_bgr: Image, dp_output_with_bboxes: Optional[Tuple[DensePoseOutput, Boxes]] + ) -> Image: + if dp_output_with_bboxes is None: + return image_bgr + densepose_output, bboxes_xywh = dp_output_with_bboxes + assert isinstance( + densepose_output, DensePoseOutput + ), "DensePoseOutput expected, {} encountered".format(type(densepose_output)) + S = densepose_output.S + I = densepose_output.I # noqa + U = densepose_output.U + V = densepose_output.V + N = S.size(0) + assert N == I.size( + 0 + ), "densepose outputs S {} and I {}" " should have equal first dim 
size".format( + S.size(), I.size() + ) + assert N == U.size( + 0 + ), "densepose outputs S {} and U {}" " should have equal first dim size".format( + S.size(), U.size() + ) + assert N == V.size( + 0 + ), "densepose outputs S {} and V {}" " should have equal first dim size".format( + S.size(), V.size() + ) + assert N == len( + bboxes_xywh + ), "number of bounding boxes {}" " should be equal to first dim size of outputs {}".format( + len(bboxes_xywh), N + ) + for n in range(N): + Sn = S[n].argmax(dim=0) + In = I[n].argmax(dim=0) * (Sn > 0).long() + segmentation = In.cpu().numpy().astype(np.uint8) + mask = np.zeros(segmentation.shape, dtype=np.uint8) + mask[segmentation > 0] = 1 + Vn = V[n].cpu().numpy().astype(np.float32) + Vvis = np.zeros(segmentation.shape, dtype=np.float32) + for partId in range(Vn.size(0)): + Vvis[segmentation == partId] = Vn[partId][segmentation == partId].clip(0, 1) * 255 + bbox_xywh = bboxes_xywh[n] + image_bgr = self.mask_visualizer.visualize(image_bgr, mask, Vvis, bbox_xywh) + return image_bgr + + +class DensePoseDataCoarseSegmentationVisualizer(object): + """ + Visualizer for ground truth segmentation + """ + + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, + cmap=cmap, + val_scale=255.0 / DensePoseDataRelative.N_BODY_PARTS, + alpha=alpha, + ) + + def visualize( + self, + image_bgr: Image, + bbox_densepose_datas: Optional[Tuple[Iterable[Boxes], Iterable[DensePoseDataRelative]]], + ) -> Image: + if bbox_densepose_datas is None: + return image_bgr + for bbox_xywh, densepose_data in zip(*bbox_densepose_datas): + matrix = densepose_data.segm.numpy() + mask = np.zeros(matrix.shape, dtype=np.uint8) + mask[matrix > 0] = 1 + image_bgr = self.mask_visualizer.visualize(image_bgr, mask, matrix, bbox_xywh.numpy()) + return image_bgr + + +class DensePoseDataPointsVisualizer(object): + def __init__(self, densepose_data_to_value_fn=None, cmap=cv2.COLORMAP_PARULA): + self.points_visualizer = PointsVisualizer() + self.densepose_data_to_value_fn = densepose_data_to_value_fn + self.cmap = cmap + + def visualize( + self, + image_bgr: Image, + bbox_densepose_datas: Optional[Tuple[Iterable[Boxes], Iterable[DensePoseDataRelative]]], + ) -> Image: + if bbox_densepose_datas is None: + return image_bgr + for bbox_xywh, densepose_data in zip(*bbox_densepose_datas): + x0, y0, w, h = bbox_xywh.numpy() + x = densepose_data.x.numpy() * w / 255.0 + x0 + y = densepose_data.y.numpy() * h / 255.0 + y0 + pts_xy = zip(x, y) + if self.densepose_data_to_value_fn is None: + image_bgr = self.points_visualizer.visualize(image_bgr, pts_xy) + else: + v = self.densepose_data_to_value_fn(densepose_data) + img_colors_bgr = cv2.applyColorMap(v, self.cmap) + colors_bgr = [ + [int(v) for v in img_color_bgr.ravel()] for img_color_bgr in img_colors_bgr + ] + image_bgr = self.points_visualizer.visualize(image_bgr, pts_xy, colors_bgr) + return image_bgr + + +def _densepose_data_u_for_cmap(densepose_data): + u = np.clip(densepose_data.u.numpy(), 0, 1) * 255.0 + return u.astype(np.uint8) + + +def _densepose_data_v_for_cmap(densepose_data): + v = np.clip(densepose_data.v.numpy(), 0, 1) * 255.0 + return v.astype(np.uint8) + + +def _densepose_data_i_for_cmap(densepose_data): + i = ( + np.clip(densepose_data.i.numpy(), 0.0, DensePoseDataRelative.N_PART_LABELS) + * 255.0 + / DensePoseDataRelative.N_PART_LABELS + ) + return i.astype(np.uint8) + + +class DensePoseDataPointsUVisualizer(DensePoseDataPointsVisualizer): + def __init__(self): + 
super(DensePoseDataPointsUVisualizer, self).__init__( + densepose_data_to_value_fn=_densepose_data_u_for_cmap + ) + + +class DensePoseDataPointsVVisualizer(DensePoseDataPointsVisualizer): + def __init__(self): + super(DensePoseDataPointsVVisualizer, self).__init__( + densepose_data_to_value_fn=_densepose_data_v_for_cmap + ) + + +class DensePoseDataPointsIVisualizer(DensePoseDataPointsVisualizer): + def __init__(self): + super(DensePoseDataPointsIVisualizer, self).__init__( + densepose_data_to_value_fn=_densepose_data_i_for_cmap + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/extractor.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..b715a4451e096d6d6c086f9bcf60f92d2ae692f8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/densepose/vis/extractor.py @@ -0,0 +1,152 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +from typing import Sequence +import torch + +from detectron2.layers.nms import batched_nms +from detectron2.structures.instances import Instances + +from densepose.vis.bounding_box import BoundingBoxVisualizer, ScoredBoundingBoxVisualizer +from densepose.vis.densepose import DensePoseResultsVisualizer + +from .base import CompoundVisualizer + +Scores = Sequence[float] + + +def extract_scores_from_instances(instances: Instances, select=None): + if instances.has("scores"): + return instances.scores if select is None else instances.scores[select] + return None + + +def extract_boxes_xywh_from_instances(instances: Instances, select=None): + if instances.has("pred_boxes"): + boxes_xywh = instances.pred_boxes.tensor.clone() + boxes_xywh[:, 2] -= boxes_xywh[:, 0] + boxes_xywh[:, 3] -= boxes_xywh[:, 1] + return boxes_xywh if select is None else boxes_xywh[select] + return None + + +def create_extractor(visualizer: object): + """ + Create an extractor for the provided visualizer + """ + if isinstance(visualizer, CompoundVisualizer): + extractors = [create_extractor(v) for v in visualizer.visualizers] + return CompoundExtractor(extractors) + elif isinstance(visualizer, DensePoseResultsVisualizer): + return DensePoseResultExtractor() + elif isinstance(visualizer, ScoredBoundingBoxVisualizer): + return CompoundExtractor([extract_boxes_xywh_from_instances, extract_scores_from_instances]) + elif isinstance(visualizer, BoundingBoxVisualizer): + return extract_boxes_xywh_from_instances + else: + logger = logging.getLogger(__name__) + logger.error(f"Could not create extractor for {visualizer}") + return None + + +class BoundingBoxExtractor(object): + """ + Extracts bounding boxes from instances + """ + + def __call__(self, instances: Instances): + boxes_xywh = extract_boxes_xywh_from_instances(instances) + return boxes_xywh + + +class ScoredBoundingBoxExtractor(object): + """ + Extracts bounding boxes from instances + """ + + def __call__(self, instances: Instances, select=None): + scores = extract_scores_from_instances(instances) + boxes_xywh = extract_boxes_xywh_from_instances(instances) + if (scores is None) or (boxes_xywh is None): + return (boxes_xywh, scores) + if select is not None: + scores = scores[select] + boxes_xywh = boxes_xywh[select] + return (boxes_xywh, scores) + + +class DensePoseResultExtractor(object): + """ + Extracts DensePose result from instances + """ + + def __call__(self, 
instances: Instances, select=None): + boxes_xywh = extract_boxes_xywh_from_instances(instances) + if instances.has("pred_densepose") and (boxes_xywh is not None): + dpout = instances.pred_densepose + if select is not None: + dpout = dpout[select] + boxes_xywh = boxes_xywh[select] + return dpout.to_result(boxes_xywh) + else: + return None + + +class CompoundExtractor(object): + """ + Extracts data for CompoundVisualizer + """ + + def __init__(self, extractors): + self.extractors = extractors + + def __call__(self, instances: Instances, select=None): + datas = [] + for extractor in self.extractors: + data = extractor(instances, select) + datas.append(data) + return datas + + +class NmsFilteredExtractor(object): + """ + Extracts data in the format accepted by NmsFilteredVisualizer + """ + + def __init__(self, extractor, iou_threshold): + self.extractor = extractor + self.iou_threshold = iou_threshold + + def __call__(self, instances: Instances, select=None): + scores = extract_scores_from_instances(instances) + boxes_xywh = extract_boxes_xywh_from_instances(instances) + if boxes_xywh is None: + return None + select_local_idx = batched_nms( + boxes_xywh, + scores, + torch.zeros(len(scores), dtype=torch.int32), + iou_threshold=self.iou_threshold, + ).squeeze() + select_local = torch.zeros(len(boxes_xywh), dtype=torch.bool, device=boxes_xywh.device) + select_local[select_local_idx] = True + select = select_local if select is None else (select & select_local) + return self.extractor(instances, select=select) + + +class ScoreThresholdedExtractor(object): + """ + Extracts data in the format accepted by ScoreThresholdedVisualizer + """ + + def __init__(self, extractor, min_score): + self.extractor = extractor + self.min_score = min_score + + def __call__(self, instances: Instances, select=None): + scores = extract_scores_from_instances(instances) + if scores is None: + return None + select_local = scores > self.min_score + select = select_local if select is None else (select & select_local) + data = self.extractor(instances, select=select) + return data diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e3a94b67ed4b4d0c2934f074802cd00f3660f9a9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/README.md @@ -0,0 +1,7 @@ + +## Some scripts for developers to use, include: + +- `run_instant_tests.sh`: run training for a few iterations. +- `run_inference_tests.sh`: run inference on a small dataset. +- `../../dev/linter.sh`: lint the codebase before commit +- `../../dev/parse_results.sh`: parse results from log file. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/run_inference_tests.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/run_inference_tests.sh new file mode 100755 index 0000000000000000000000000000000000000000..34f47d5a07a90c411e830c98a346845fa618f836 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/run_inference_tests.sh @@ -0,0 +1,33 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +BIN="python train_net.py" +OUTPUT="inference_test_output" +NUM_GPUS=2 +IMS_PER_GPU=2 +IMS_PER_BATCH=$(( NUM_GPUS * IMS_PER_GPU )) + +CFG_LIST=( "${@:1}" ) + +if [ ${#CFG_LIST[@]} -eq 0 ]; then + CFG_LIST=( ./configs/quick_schedules/*inference_acc_test.yaml ) +fi + +echo "========================================================================" +echo "Configs to run:" +echo "${CFG_LIST[@]}" +echo "========================================================================" + +for cfg in "${CFG_LIST[@]}"; do + echo "========================================================================" + echo "Running $cfg ..." + echo "========================================================================" + $BIN \ + --eval-only \ + --num-gpus $NUM_GPUS \ + --config-file "$cfg" \ + OUTPUT_DIR "$OUTPUT" \ + SOLVER.IMS_PER_BATCH $IMS_PER_BATCH + rm -rf $OUTPUT +done + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/run_instant_tests.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/run_instant_tests.sh new file mode 100755 index 0000000000000000000000000000000000000000..a53785180974a70bce7fdb0c9da4024166efd596 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/dev/run_instant_tests.sh @@ -0,0 +1,28 @@ +#!/bin/bash -e +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +BIN="python train_net.py" +OUTPUT="instant_test_output" +NUM_GPUS=2 +SOLVER_IMS_PER_BATCH=$((NUM_GPUS * 2)) + +CFG_LIST=( "${@:1}" ) +if [ ${#CFG_LIST[@]} -eq 0 ]; then + CFG_LIST=( ./configs/quick_schedules/*instant_test.yaml ) +fi + +echo "========================================================================" +echo "Configs to run:" +echo "${CFG_LIST[@]}" +echo "========================================================================" + +for cfg in "${CFG_LIST[@]}"; do + echo "========================================================================" + echo "Running $cfg ..." + echo "========================================================================" + $BIN --num-gpus $NUM_GPUS --config-file "$cfg" \ + SOLVER.IMS_PER_BATCH $SOLVER_IMS_PER_BATCH \ + OUTPUT_DIR "$OUTPUT" + rm -rf "$OUTPUT" +done + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/GETTING_STARTED.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/GETTING_STARTED.md new file mode 100644 index 0000000000000000000000000000000000000000..a6bcbedee42835c99fa5aa1110309329dfbff6f0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/GETTING_STARTED.md @@ -0,0 +1,58 @@ +# Getting Started with DensePose + +## Inference with Pre-trained Models + +1. Pick a model and its config file from [Model Zoo](MODEL_ZOO.md), for example [densepose_rcnn_R_50_FPN_s1x.yaml](../configs/densepose_rcnn_R_50_FPN_s1x.yaml) +2. Run the [Apply Net](TOOL_APPLY_NET.md) tool to visualize the results or save the to disk. For example, to use contour visualization for DensePose, one can run: +```bash +python apply_net.py show configs/densepose_rcnn_R_50_FPN_s1x.yaml densepose_rcnn_R_50_FPN_s1x.pkl image.jpg dp_contour,bbox --output image_densepose_contour.png +``` +Please see [Apply Net](TOOL_APPLY_NET.md) for more details on the tool. 
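+
+The `apply_net` tool covers most use cases, but the models can also be run directly from Python. Below is a minimal sketch (not part of the documented workflow) that mirrors the config setup used in `tests/common.py`; it assumes the DensePose project directory is importable (see the `PYTHONPATH` note in [Apply Net](TOOL_APPLY_NET.md)) and uses placeholder paths for the config file and checkpoint:
+```python
+import cv2
+
+from detectron2.config import get_cfg
+from detectron2.engine import DefaultPredictor
+
+from densepose import add_dataset_category_config, add_densepose_config
+
+# Build a config with the DensePose-specific options registered.
+cfg = get_cfg()
+add_dataset_category_config(cfg)
+add_densepose_config(cfg)
+cfg.merge_from_file("configs/densepose_rcnn_R_50_FPN_s1x.yaml")
+cfg.MODEL.WEIGHTS = "densepose_rcnn_R_50_FPN_s1x.pkl"  # assumed local checkpoint path
+
+predictor = DefaultPredictor(cfg)
+image_bgr = cv2.imread("image.jpg")             # detectron2 predictors expect BGR images
+instances = predictor(image_bgr)["instances"]   # detections with scores, pred_boxes, pred_densepose
+```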
+ +## Training + +First, prepare the [dataset](http://densepose.org/#dataset) into the following structure under the directory you'll run training scripts: +
+datasets/coco/
+  annotations/
+    densepose_{train,minival,valminusminival}2014.json
+    densepose_minival2014_100.json   (optional, for testing only)
+  {train,val}2014/
+    # image files that are mentioned in the corresponding json
+
+ +To train a model one can use the [train_net.py](../train_net.py) script. +This script was used to train all DensePose models in [Model Zoo](MODEL_ZOO.md). +For example, to launch end-to-end DensePose-RCNN training with ResNet-50 FPN backbone +on 8 GPUs following the s1x schedule, one can run +```bash +python train_net.py --config-file configs/densepose_rcnn_R_50_FPN_s1x.yaml --num-gpus 8 +``` +The configs are made for 8-GPU training. To train on 1 GPU, one can apply the +[linear learning rate scaling rule](https://arxiv.org/abs/1706.02677): +```bash +python train_net.py --config-file configs/densepose_rcnn_R_50_FPN_s1x.yaml \ + SOLVER.IMS_PER_BATCH 2 SOLVER.BASE_LR 0.0025 +``` + +## Evaluation + +Model testing can be done in the same way as training, except for an additional flag `--eval-only` and +model location specification through `MODEL.WEIGHTS model.pth` in the command line +```bash +python train_net.py --config-file configs/densepose_rcnn_R_50_FPN_s1x.yaml \ + --eval-only MODEL.WEIGHTS model.pth +``` + +## Tools + +We provide tools which allow one to: + - easily view DensePose annotated data in a dataset; + - perform DensePose inference on a set of images; + - visualize DensePose model results; + +`query_db` is a tool to print or visualize DensePose data in a dataset. +Please refer to [Query DB](TOOL_QUERY_DB.md) for more details on this tool + +`apply_net` is a tool to print or visualize DensePose results. +Please refer to [Apply Net](TOOL_APPLY_NET.md) for more details on this tool diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/MODEL_ZOO.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/MODEL_ZOO.md new file mode 100644 index 0000000000000000000000000000000000000000..c26308417de03efea3872b44fec43c74ead529e9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/MODEL_ZOO.md @@ -0,0 +1,277 @@ +# Model Zoo and Baselines + +# Introduction + +We provide baselines trained with Detectron2 DensePose. The corresponding +configuration files can be found in the [configs](../configs) directory. +All models were trained on COCO `train2014` + `valminusminival2014` and +evaluated on COCO `minival2014`. For the details on common settings in which +baselines were trained, please check [Detectron 2 Model Zoo](../../../MODEL_ZOO.md). + +## License + +All models available for download through this document are licensed under the +[Creative Commons Attribution-ShareAlike 3.0 license](https://creativecommons.org/licenses/by-sa/3.0/) + +## COCO DensePose Baselines with DensePose-RCNN + +### Legacy Models + +Baselines trained using schedules from [Güler et al, 2018](https://arxiv.org/pdf/1802.00434.pdf) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | dp. AP GPS | dp. AP GPSm | model id | download |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| R_50_FPN_s1x_legacy | s1x | 0.307 | 0.051 | 3.2 | 58.1 | 52.1 | 54.9 | 164832157 | model \| metrics |
+| R_101_FPN_s1x_legacy | s1x | 0.390 | 0.063 | 4.3 | 59.5 | 53.2 | 56.1 | 164832182 | model \| metrics |
+
+### Improved Baselines, Original Fully Convolutional Head
+
+These models use an improved training schedule and Panoptic FPN head from [Kirillov et al, 2019](https://arxiv.org/abs/1901.02446).
+
+| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | dp. AP GPS | dp. AP GPSm | model id | download |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| R_50_FPN_s1x | s1x | 0.359 | 0.066 | 4.5 | 61.2 | 63.7 | 65.3 | 165712039 | model \| metrics |
+| R_101_FPN_s1x | s1x | 0.428 | 0.079 | 5.8 | 62.3 | 64.5 | 66.4 | 165712084 | model \| metrics |
+
+### Improved Baselines, DeepLabV3 Head
+
+These models use an improved training schedule, Panoptic FPN head from [Kirillov et al, 2019](https://arxiv.org/abs/1901.02446) and DeepLabV3 head from [Chen et al, 2017](https://arxiv.org/abs/1706.05587).
+
+| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | dp. AP GPS | dp. AP GPSm | model id | download |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| R_50_FPN_DL_s1x | s1x | 0.392 | 0.070 | 6.7 | 61.1 | 65.6 | 66.8 | 165712097 | model \| metrics |
+| R_101_FPN_DL_s1x | s1x | 0.478 | 0.083 | 7.0 | 62.3 | 66.3 | 67.7 | 165712116 | model \| metrics |
+
+### Baselines with Confidence Estimation
+
+These models perform additional estimation of confidence in regressed UV coordinates, along the lines of [Neverova et al., 2019](https://papers.nips.cc/paper/8378-correlated-uncertainty-for-learning-dense-correspondences-from-noisy-labels).
+
+| Name | lr sched | train time (s/iter) | inference time (s/im) | train mem (GB) | box AP | dp. AP GPS | dp. AP GPSm | model id | download |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| R_50_FPN_WC1_s1x | s1x | 0.353 | 0.064 | 4.6 | 60.5 | 64.2 | 65.6 | 173862049 | model \| metrics |
+| R_50_FPN_WC2_s1x | s1x | 0.364 | 0.066 | 4.8 | 60.7 | 64.2 | 65.7 | 173861455 | model \| metrics |
+| R_50_FPN_DL_WC1_s1x | s1x | 0.397 | 0.068 | 6.7 | 61.1 | 65.8 | 67.1 | 173067973 | model \| metrics |
+| R_50_FPN_DL_WC2_s1x | s1x | 0.410 | 0.070 | 6.8 | 60.8 | 65.6 | 66.7 | 173859335 | model \| metrics |
+| R_101_FPN_WC1_s1x | s1x | 0.435 | 0.076 | 5.7 | 62.5 | 64.9 | 66.5 | 171402969 | model \| metrics |
+| R_101_FPN_WC2_s1x | s1x | 0.450 | 0.078 | 5.7 | 62.3 | 64.8 | 66.6 | 173860702 | model \| metrics |
+| R_101_FPN_DL_WC1_s1x | s1x | 0.479 | 0.081 | 7.9 | 62.0 | 66.2 | 67.4 | 173858525 | model \| metrics |
+| R_101_FPN_DL_WC2_s1x | s1x | 0.491 | 0.082 | 7.6 | 61.7 | 65.9 | 67.3 | 173294801 | model \| metrics |
+ +## Old Baselines + +It is still possible to use some baselines from [DensePose 1](https://github.com/facebookresearch/DensePose). +Below are evaluation metrics for the baselines recomputed in the current framework: + +| Model | bbox AP | AP | AP50 | AP75 | APm |APl | +|-----|-----|-----|--- |--- |--- |--- | +| [`ResNet50_FPN_s1x-e2e`](https://dl.fbaipublicfiles.com/densepose/DensePose_ResNet50_FPN_s1x-e2e.pkl) | 54.673 | 48.894 | 84.963 | 50.717 | 43.132 | 50.433 | +| [`ResNet101_FPN_s1x-e2e`](https://dl.fbaipublicfiles.com/densepose/DensePose_ResNet101_FPN_s1x-e2e.pkl) | 56.032 | 51.088 | 86.250 | 55.057 | 46.542 | 52.563 | + +Note: these scores are close, but not strictly equal to the ones reported in the [DensePose 1 Model Zoo](https://github.com/facebookresearch/DensePose/blob/master/MODEL_ZOO.md), +which is due to small incompatibilities between the frameworks. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/TOOL_APPLY_NET.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/TOOL_APPLY_NET.md new file mode 100644 index 0000000000000000000000000000000000000000..f5cf2579a83811e4b192b3688f241b570f62bcb5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/TOOL_APPLY_NET.md @@ -0,0 +1,130 @@ +# Apply Net + +`apply_net` is a tool to print or visualize DensePose results on a set of images. +It has two modes: `dump` to save DensePose model results to a pickle file +and `show` to visualize them on images. + +## Dump Mode + +The general command form is: +```bash +python apply_net.py dump [-h] [-v] [--output ] +``` + +There are three mandatory arguments: + - ``, configuration file for a given model; + - ``, model file with trained parameters + - ``, input image file name, pattern or folder + +One can additionally provide `--output` argument to define the output file name, +which defaults to `output.pkl`. + + +Examples: + +1. Dump results of a DensePose model with ResNet-50 FPN backbone for images + in a folder `images` to file `dump.pkl`: +```bash +python apply_net.py dump configs/densepose_rcnn_R_50_FPN_s1x.yaml DensePose_ResNet50_FPN_s1x-e2e.pkl images --output dump.pkl -v +``` + +2. Dump results of a DensePose model with ResNet-50 FPN backbone for images + with file name matching a pattern `image*.jpg` to file `results.pkl`: +```bash +python apply_net.py dump configs/densepose_rcnn_R_50_FPN_s1x.yaml DensePose_ResNet50_FPN_s1x-e2e.pkl "image*.jpg" --output results.pkl -v +``` + +If you want to load the pickle file generated by the above command: +``` +# make sure DensePose is in your PYTHONPATH, or use the following line to add it: +sys.path.append("/your_detectron2_path/detectron2_repo/projects/DensePose/") + +f = open('/your_result_path/results.pkl', 'rb') +data = pickle.load(f) +``` + +The file `results.pkl` contains the list of results per image, for each image the result is a dictionary: +``` +data: [{'file_name': '/your_path/image1.jpg', + 'scores': tensor([0.9884]), + 'pred_boxes_XYXY': tensor([[ 69.6114, 0.0000, 706.9797, 706.0000]]), + 'pred_densepose': }, + {'file_name': '/your_path/image2.jpg', + 'scores': tensor([0.9999, 0.5373, 0.3991]), + 'pred_boxes_XYXY': tensor([[ 59.5734, 7.7535, 579.9311, 932.3619], + [612.9418, 686.1254, 612.9999, 704.6053], + [164.5081, 407.4034, 598.3944, 920.4266]]), + 'pred_densepose': }] +``` + +We can use the following code, to parse the outputs of the first +detected instance on the first image. 
+``` +img_id, instance_id = 0, 0 # Look at the first image and the first detected instance +bbox_xyxy = data[img_id]['pred_boxes_XYXY'][instance_id] +result_encoded = data[img_id]['pred_densepose'].results[instance_id] +iuv_arr = DensePoseResult.decode_png_data(*result_encoded) +``` +The array `bbox_xyxy` contains (x0, y0, x1, y1) of the bounding box. + +The shape of `iuv_arr` is `[3, H, W]`, where (H, W) is the shape of the bounding box. +- `iuv_arr[0,:,:]`: The patch index of image points, indicating which of the 24 surface patches the point is on. +- `iuv_arr[1,:,:]`: The U-coordinate value of image points. +- `iuv_arr[2,:,:]`: The V-coordinate value of image points. + + +## Visualization Mode + +The general command form is: +```bash +python apply_net.py show [-h] [-v] [--min_score ] [--nms_thresh ] [--output ] +``` + +There are four mandatory arguments: + - ``, configuration file for a given model; + - ``, model file with trained parameters + - ``, input image file name, pattern or folder + - ``, visualizations specifier; currently available visualizations are: + * `bbox` - bounding boxes of detected persons; + * `dp_segm` - segmentation masks for detected persons; + * `dp_u` - each body part is colored according to the estimated values of the + U coordinate in part parameterization; + * `dp_v` - each body part is colored according to the estimated values of the + V coordinate in part parameterization; + * `dp_contour` - plots contours with color-coded U and V coordinates + + +One can additionally provide the following optional arguments: + - `--min_score` to only show detections with sufficient scores that are not lower than provided value + - `--nms_thresh` to additionally apply non-maximum suppression to detections at a given threshold + - `--output` to define visualization file name template, which defaults to `output.png`. + To distinguish output file names for different images, the tool appends 1-based entry index, + e.g. output.0001.png, output.0002.png, etc... + + +The following examples show how to output results of a DensePose model +with ResNet-50 FPN backbone using different visualizations for image `image.jpg`: + +1. Show bounding box and segmentation: +```bash +python apply_net.py show configs/densepose_rcnn_R_50_FPN_s1x.yaml DensePose_ResNet50_FPN_s1x-e2e.pkl image.jpg bbox,dp_segm -v +``` +![Bounding Box + Segmentation Visualization](images/res_bbox_dp_segm.jpg) + +2. Show bounding box and estimated U coordinates for body parts: +```bash +python apply_net.py show configs/densepose_rcnn_R_50_FPN_s1x.yaml DensePose_ResNet50_FPN_s1x-e2e.pkl image.jpg bbox,dp_u -v +``` +![Bounding Box + U Coordinate Visualization](images/res_bbox_dp_u.jpg) + +3. Show bounding box and estimated V coordinates for body parts: +```bash +python apply_net.py show configs/densepose_rcnn_R_50_FPN_s1x.yaml DensePose_ResNet50_FPN_s1x-e2e.pkl image.jpg bbox,dp_v -v +``` +![Bounding Box + V Coordinate Visualization](images/res_bbox_dp_v.jpg) + +4. 
Show bounding box and estimated U and V coordinates via contour plots: +```bash +python apply_net.py show configs/densepose_rcnn_R_50_FPN_s1x.yaml DensePose_ResNet50_FPN_s1x-e2e.pkl image.jpg dp_contour,bbox -v +``` +![Bounding Box + Contour Visualization](images/res_bbox_dp_contour.jpg) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/TOOL_QUERY_DB.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/TOOL_QUERY_DB.md new file mode 100644 index 0000000000000000000000000000000000000000..b0a764b8740597c6af634127b80b53d28913726f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/doc/TOOL_QUERY_DB.md @@ -0,0 +1,105 @@ + +# Query Dataset + +`query_db` is a tool to print or visualize DensePose data from a dataset. +It has two modes: `print` and `show` to output dataset entries to standard +output or to visualize them on images. + +## Print Mode + +The general command form is: +```bash +python query_db.py print [-h] [-v] [--max-entries N] +``` + +There are two mandatory arguments: + - ``, DensePose dataset specification, from which to select + the entries (e.g. `densepose_coco_2014_train`). + - ``, dataset entry selector which can be a single specification, + or a comma-separated list of specifications of the form + `field[:type]=value` for exact match with the value + or `field[:type]=min-max` for a range of values + +One can additionally limit the maximum number of entries to output +by providing `--max-entries` argument. + +Examples: + +1. Output at most 10 first entries from the `densepose_coco_2014_train` dataset: +```bash +python query_db.py print densepose_coco_2014_train \* --max-entries 10 -v +``` + +2. Output all entries with `file_name` equal to `COCO_train2014_000000000036.jpg`: +```bash +python query_db.py print densepose_coco_2014_train file_name=COCO_train2014_000000000036.jpg -v +``` + +3. Output all entries with `image_id` between 36 and 156: +```bash +python query_db.py print densepose_coco_2014_train image_id:int=36-156 -v +``` + +## Visualization Mode + +The general command form is: +```bash +python query_db.py show [-h] [-v] [--max-entries N] [--output ] +``` + +There are three mandatory arguments: + - ``, DensePose dataset specification, from which to select + the entries (e.g. `densepose_coco_2014_train`). + - ``, dataset entry selector which can be a single specification, + or a comma-separated list of specifications of the form + `field[:type]=value` for exact match with the value + or `field[:type]=min-max` for a range of values + - ``, visualizations specifier; currently available visualizations are: + * `bbox` - bounding boxes of annotated persons; + * `dp_i` - annotated points colored according to the containing part; + * `dp_pts` - annotated points in green color; + * `dp_segm` - segmentation masks for annotated persons; + * `dp_u` - annotated points colored according to their U coordinate in part parameterization; + * `dp_v` - annotated points colored according to their V coordinate in part parameterization; + +One can additionally provide one of the two optional arguments: + - `--max_entries` to limit the maximum number of entries to visualize + - `--output` to provide visualization file name template, which defaults + to `output.png`. To distinguish file names for different dataset + entries, the tool appends 1-based entry index to the output file name, + e.g. output.0001.png, output.0002.png, etc. 
+ +The following examples show how to output different visualizations for image with `id = 322` +from `densepose_coco_2014_train` dataset: + +1. Show bounding box and segmentation: +```bash +python query_db.py show densepose_coco_2014_train image_id:int=322 bbox,dp_segm -v +``` +![Bounding Box + Segmentation Visualization](images/vis_bbox_dp_segm.jpg) + +2. Show bounding box and points colored according to the containing part: +```bash +python query_db.py show densepose_coco_2014_train image_id:int=322 bbox,dp_i -v +``` +![Bounding Box + Point Label Visualization](images/vis_bbox_dp_i.jpg) + +3. Show bounding box and annotated points in green color: +```bash +python query_db.py show densepose_coco_2014_train image_id:int=322 bbox,dp_segm -v +``` +![Bounding Box + Point Visualization](images/vis_bbox_dp_pts.jpg) + +4. Show bounding box and annotated points colored according to their U coordinate in part parameterization: +```bash +python query_db.py show densepose_coco_2014_train image_id:int=322 bbox,dp_u -v +``` +![Bounding Box + Point U Visualization](images/vis_bbox_dp_u.jpg) + +5. Show bounding box and annotated points colored according to their V coordinate in part parameterization: +```bash +python query_db.py show densepose_coco_2014_train image_id:int=322 bbox,dp_v -v +``` +![Bounding Box + Point V Visualization](images/vis_bbox_dp_v.jpg) + + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/query_db.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/query_db.py new file mode 100755 index 0000000000000000000000000000000000000000..6d3ea2ffdff7559a8cd78df95a5fb7f308f33e1e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/query_db.py @@ -0,0 +1,250 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import argparse +import logging +import os +import sys +from timeit import default_timer as timer +from typing import Any, ClassVar, Dict, List +import torch +from fvcore.common.file_io import PathManager + +from detectron2.data.catalog import DatasetCatalog +from detectron2.utils.logger import setup_logger + +from densepose.data.structures import DensePoseDataRelative +from densepose.utils.dbhelper import EntrySelector +from densepose.utils.logger import verbosity_to_level +from densepose.vis.base import CompoundVisualizer +from densepose.vis.bounding_box import BoundingBoxVisualizer +from densepose.vis.densepose import ( + DensePoseDataCoarseSegmentationVisualizer, + DensePoseDataPointsIVisualizer, + DensePoseDataPointsUVisualizer, + DensePoseDataPointsVisualizer, + DensePoseDataPointsVVisualizer, +) + +DOC = """Query DB - a tool to print / visualize data from a database +""" + +LOGGER_NAME = "query_db" + +logger = logging.getLogger(LOGGER_NAME) + +_ACTION_REGISTRY: Dict[str, "Action"] = {} + + +class Action(object): + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + parser.add_argument( + "-v", + "--verbosity", + action="count", + help="Verbose mode. 
Multiple -v options increase the verbosity.", + ) + + +def register_action(cls: type): + """ + Decorator for action classes to automate action registration + """ + global _ACTION_REGISTRY + _ACTION_REGISTRY[cls.COMMAND] = cls + return cls + + +class EntrywiseAction(Action): + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + super(EntrywiseAction, cls).add_arguments(parser) + parser.add_argument( + "dataset", metavar="", help="Dataset name (e.g. densepose_coco_2014_train)" + ) + parser.add_argument( + "selector", + metavar="", + help="Dataset entry selector in the form field1[:type]=value1[," + "field2[:type]=value_min-value_max...] which selects all " + "entries from the dataset that satisfy the constraints", + ) + parser.add_argument( + "--max-entries", metavar="N", help="Maximum number of entries to process", type=int + ) + + @classmethod + def execute(cls: type, args: argparse.Namespace): + dataset = setup_dataset(args.dataset) + entry_selector = EntrySelector.from_string(args.selector) + context = cls.create_context(args) + if args.max_entries is not None: + for _, entry in zip(range(args.max_entries), dataset): + if entry_selector(entry): + cls.execute_on_entry(entry, context) + else: + for entry in dataset: + if entry_selector(entry): + cls.execute_on_entry(entry, context) + + @classmethod + def create_context(cls: type, args: argparse.Namespace) -> Dict[str, Any]: + context = {} + return context + + +@register_action +class PrintAction(EntrywiseAction): + """ + Print action that outputs selected entries to stdout + """ + + COMMAND: ClassVar[str] = "print" + + @classmethod + def add_parser(cls: type, subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser(cls.COMMAND, help="Output selected entries to stdout. 
") + cls.add_arguments(parser) + parser.set_defaults(func=cls.execute) + + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + super(PrintAction, cls).add_arguments(parser) + + @classmethod + def execute_on_entry(cls: type, entry: Dict[str, Any], context: Dict[str, Any]): + import pprint + + printer = pprint.PrettyPrinter(indent=2, width=200, compact=True) + printer.pprint(entry) + + +@register_action +class ShowAction(EntrywiseAction): + """ + Show action that visualizes selected entries on an image + """ + + COMMAND: ClassVar[str] = "show" + VISUALIZERS: ClassVar[Dict[str, object]] = { + "dp_segm": DensePoseDataCoarseSegmentationVisualizer(), + "dp_i": DensePoseDataPointsIVisualizer(), + "dp_u": DensePoseDataPointsUVisualizer(), + "dp_v": DensePoseDataPointsVVisualizer(), + "dp_pts": DensePoseDataPointsVisualizer(), + "bbox": BoundingBoxVisualizer(), + } + + @classmethod + def add_parser(cls: type, subparsers: argparse._SubParsersAction): + parser = subparsers.add_parser(cls.COMMAND, help="Visualize selected entries") + cls.add_arguments(parser) + parser.set_defaults(func=cls.execute) + + @classmethod + def add_arguments(cls: type, parser: argparse.ArgumentParser): + super(ShowAction, cls).add_arguments(parser) + parser.add_argument( + "visualizations", + metavar="", + help="Comma separated list of visualizations, possible values: " + "[{}]".format(",".join(sorted(cls.VISUALIZERS.keys()))), + ) + parser.add_argument( + "--output", + metavar="", + default="output.png", + help="File name to save output to", + ) + + @classmethod + def execute_on_entry(cls: type, entry: Dict[str, Any], context: Dict[str, Any]): + import cv2 + import numpy as np + + image_fpath = PathManager.get_local_path(entry["file_name"]) + image = cv2.imread(image_fpath, cv2.IMREAD_GRAYSCALE) + image = np.tile(image[:, :, np.newaxis], [1, 1, 3]) + datas = cls._extract_data_for_visualizers_from_entry(context["vis_specs"], entry) + visualizer = context["visualizer"] + image_vis = visualizer.visualize(image, datas) + entry_idx = context["entry_idx"] + 1 + out_fname = cls._get_out_fname(entry_idx, context["out_fname"]) + cv2.imwrite(out_fname, image_vis) + logger.info(f"Output saved to {out_fname}") + context["entry_idx"] += 1 + + @classmethod + def _get_out_fname(cls: type, entry_idx: int, fname_base: str): + base, ext = os.path.splitext(fname_base) + return base + ".{0:04d}".format(entry_idx) + ext + + @classmethod + def create_context(cls: type, args: argparse.Namespace) -> Dict[str, Any]: + vis_specs = args.visualizations.split(",") + visualizers = [] + for vis_spec in vis_specs: + vis = cls.VISUALIZERS[vis_spec] + visualizers.append(vis) + context = { + "vis_specs": vis_specs, + "visualizer": CompoundVisualizer(visualizers), + "out_fname": args.output, + "entry_idx": 0, + } + return context + + @classmethod + def _extract_data_for_visualizers_from_entry( + cls: type, vis_specs: List[str], entry: Dict[str, Any] + ): + dp_list = [] + bbox_list = [] + for annotation in entry["annotations"]: + is_valid, _ = DensePoseDataRelative.validate_annotation(annotation) + if not is_valid: + continue + bbox = torch.as_tensor(annotation["bbox"]) + bbox_list.append(bbox) + dp_data = DensePoseDataRelative(annotation) + dp_list.append(dp_data) + datas = [] + for vis_spec in vis_specs: + datas.append(bbox_list if "bbox" == vis_spec else (bbox_list, dp_list)) + return datas + + +def setup_dataset(dataset_name): + logger.info("Loading dataset {}".format(dataset_name)) + start = timer() + dataset = 
DatasetCatalog.get(dataset_name) + stop = timer() + logger.info("Loaded dataset {} in {:.3f}s".format(dataset_name, stop - start)) + return dataset + + +def create_argument_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description=DOC, + formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=120), + ) + parser.set_defaults(func=lambda _: parser.print_help(sys.stdout)) + subparsers = parser.add_subparsers(title="Actions") + for _, action in _ACTION_REGISTRY.items(): + action.add_parser(subparsers) + return parser + + +def main(): + parser = create_argument_parser() + args = parser.parse_args() + verbosity = args.verbosity if hasattr(args, "verbosity") else None + global logger + logger = setup_logger(name=LOGGER_NAME) + logger.setLevel(verbosity_to_level(verbosity)) + args.func(args) + + +if __name__ == "__main__": + main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/common.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/common.py new file mode 100644 index 0000000000000000000000000000000000000000..13bf0dd3ca113e0756d3023e36272675c6b972f9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/common.py @@ -0,0 +1,110 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import os +import torch + +from detectron2.config import get_cfg +from detectron2.engine import default_setup +from detectron2.modeling import build_model + +from densepose import add_dataset_category_config, add_densepose_config + +_BASE_CONFIG_DIR = "configs" +_EVOLUTION_CONFIG_SUB_DIR = "evolution" +_QUICK_SCHEDULES_CONFIG_SUB_DIR = "quick_schedules" +_BASE_CONFIG_FILE_PREFIX = "Base-" +_CONFIG_FILE_EXT = ".yaml" + + +def _get_base_config_dir(): + """ + Return the base directory for configurations + """ + return os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", _BASE_CONFIG_DIR) + + +def _get_evolution_config_dir(): + """ + Return the base directory for evolution configurations + """ + return os.path.join(_get_base_config_dir(), _EVOLUTION_CONFIG_SUB_DIR) + + +def _get_quick_schedules_config_dir(): + """ + Return the base directory for quick schedules configurations + """ + return os.path.join(_get_base_config_dir(), _QUICK_SCHEDULES_CONFIG_SUB_DIR) + + +def _collect_config_files(config_dir): + """ + Collect all configuration files (i.e. 
densepose_*.yaml) directly in the specified directory + """ + start = _get_base_config_dir() + results = [] + for entry in os.listdir(config_dir): + path = os.path.join(config_dir, entry) + if not os.path.isfile(path): + continue + _, ext = os.path.splitext(entry) + if ext != _CONFIG_FILE_EXT: + continue + if entry.startswith(_BASE_CONFIG_FILE_PREFIX): + continue + config_file = os.path.relpath(path, start) + results.append(config_file) + return results + + +def get_config_files(): + """ + Get all the configuration files (relative to the base configuration directory) + """ + return _collect_config_files(_get_base_config_dir()) + + +def get_evolution_config_files(): + """ + Get all the evolution configuration files (relative to the base configuration directory) + """ + return _collect_config_files(_get_evolution_config_dir()) + + +def get_quick_schedules_config_files(): + """ + Get all the quick schedules configuration files (relative to the base configuration directory) + """ + return _collect_config_files(_get_quick_schedules_config_dir()) + + +def _get_model_config(config_file): + """ + Load and return the configuration from the specified file (relative to the base configuration + directory) + """ + cfg = get_cfg() + add_dataset_category_config(cfg) + add_densepose_config(cfg) + path = os.path.join(_get_base_config_dir(), config_file) + cfg.merge_from_file(path) + if not torch.cuda.is_available(): + cfg.MODEL_DEVICE = "cpu" + return cfg + + +def get_model(config_file): + """ + Get the model from the specified file (relative to the base configuration directory) + """ + cfg = _get_model_config(config_file) + return build_model(cfg) + + +def setup(config_file): + """ + Setup the configuration from the specified file (relative to the base configuration directory) + """ + cfg = _get_model_config(config_file) + cfg.freeze() + default_setup(cfg, {}) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_model_e2e.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_model_e2e.py new file mode 100644 index 0000000000000000000000000000000000000000..eed131080547d84185c1d33913014a2c977b119f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_model_e2e.py @@ -0,0 +1,43 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+ +import unittest +import torch + +from detectron2.structures import BitMasks, Boxes, Instances + +from .common import get_model + + +# TODO(plabatut): Modularize detectron2 tests and re-use +def make_model_inputs(image, instances=None): + if instances is None: + return {"image": image} + + return {"image": image, "instances": instances} + + +def make_empty_instances(h, w): + instances = Instances((h, w)) + instances.gt_boxes = Boxes(torch.rand(0, 4)) + instances.gt_classes = torch.tensor([]).to(dtype=torch.int64) + instances.gt_masks = BitMasks(torch.rand(0, h, w)) + return instances + + +class ModelE2ETest(unittest.TestCase): + CONFIG_PATH = "" + + def setUp(self): + self.model = get_model(self.CONFIG_PATH) + + def _test_eval(self, sizes): + inputs = [make_model_inputs(torch.rand(3, size[0], size[1])) for size in sizes] + self.model.eval() + self.model(inputs) + + +class DensePoseRCNNE2ETest(ModelE2ETest): + CONFIG_PATH = "densepose_rcnn_R_101_FPN_s1x.yaml" + + def test_empty_data(self): + self._test_eval([(200, 250), (200, 249)]) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_setup.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_setup.py new file mode 100644 index 0000000000000000000000000000000000000000..96827f14b3a71d571c2109791233b5bcf7ef35f8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_setup.py @@ -0,0 +1,30 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +import unittest + +from .common import ( + get_config_files, + get_evolution_config_files, + get_quick_schedules_config_files, + setup, +) + + +class TestSetup(unittest.TestCase): + def _test_setup(self, config_file): + setup(config_file) + + def test_setup_configs(self): + config_files = get_config_files() + for config_file in config_files: + self._test_setup(config_file) + + def test_setup_evolution_configs(self): + config_files = get_evolution_config_files() + for config_file in config_files: + self._test_setup(config_file) + + def test_setup_quick_schedules_configs(self): + config_files = get_quick_schedules_config_files() + for config_file in config_files: + self._test_setup(config_file) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_structures.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_structures.py new file mode 100644 index 0000000000000000000000000000000000000000..ad97c23a43a9a72db566ec272b10f5bbda874695 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/tests/test_structures.py @@ -0,0 +1,25 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+ +import unittest + +from densepose.data.structures import normalized_coords_transform + + +class TestStructures(unittest.TestCase): + def test_normalized_coords_transform(self): + bbox = (32, 24, 288, 216) + x0, y0, w, h = bbox + xmin, ymin, xmax, ymax = x0, y0, x0 + w, y0 + h + f = normalized_coords_transform(*bbox) + # Top-left + expected_p, actual_p = (-1, -1), f((xmin, ymin)) + self.assertEqual(expected_p, actual_p) + # Top-right + expected_p, actual_p = (1, -1), f((xmax, ymin)) + self.assertEqual(expected_p, actual_p) + # Bottom-left + expected_p, actual_p = (-1, 1), f((xmin, ymax)) + self.assertEqual(expected_p, actual_p) + # Bottom-right + expected_p, actual_p = (1, 1), f((xmax, ymax)) + self.assertEqual(expected_p, actual_p) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/train_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/train_net.py new file mode 100755 index 0000000000000000000000000000000000000000..9d2e7bd8b92964f752620d92e7acb662c0b86fa7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/DensePose/train_net.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +DensePose Training Script. + +This script is similar to the training script in detectron2/tools. + +It is an example of how a user might use detectron2 for a new project. +""" + +import logging +import os +from collections import OrderedDict +from fvcore.common.file_io import PathManager + +import detectron2.utils.comm as comm +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import CfgNode, get_cfg +from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch +from detectron2.evaluation import COCOEvaluator, DatasetEvaluators, verify_results +from detectron2.modeling import DatasetMapperTTA +from detectron2.utils.logger import setup_logger + +from densepose import ( + DensePoseCOCOEvaluator, + DensePoseGeneralizedRCNNWithTTA, + add_dataset_category_config, + add_densepose_config, + load_from_cfg, +) +from densepose.data import DatasetMapper, build_detection_test_loader, build_detection_train_loader + + +class Trainer(DefaultTrainer): + @classmethod + def build_evaluator(cls, cfg: CfgNode, dataset_name, output_folder=None): + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + evaluators = [COCOEvaluator(dataset_name, cfg, True, output_folder)] + if cfg.MODEL.DENSEPOSE_ON: + evaluators.append(DensePoseCOCOEvaluator(dataset_name, True, output_folder)) + return DatasetEvaluators(evaluators) + + @classmethod + def build_test_loader(cls, cfg: CfgNode, dataset_name): + return build_detection_test_loader(cfg, dataset_name, mapper=DatasetMapper(cfg, False)) + + @classmethod + def build_train_loader(cls, cfg: CfgNode): + return build_detection_train_loader(cfg, mapper=DatasetMapper(cfg, True)) + + @classmethod + def test_with_TTA(cls, cfg: CfgNode, model): + logger = logging.getLogger("detectron2.trainer") + # In the end of training, run an evaluation with TTA + # Only support some R-CNN models. 
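+ # DensePoseGeneralizedRCNNWithTTA (built below) wraps the trained model, runs it on the augmented copies produced by DatasetMapperTTA, and merges the per-augmentation predictions before evaluation.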
+ logger.info("Running inference with test-time augmentation ...") + transform_data = load_from_cfg(cfg) + model = DensePoseGeneralizedRCNNWithTTA(cfg, model, transform_data, DatasetMapperTTA(cfg)) + evaluators = [ + cls.build_evaluator( + cfg, name, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA") + ) + for name in cfg.DATASETS.TEST + ] + res = cls.test(cfg, model, evaluators) + res = OrderedDict({k + "_TTA": v for k, v in res.items()}) + return res + + +def setup(args): + cfg = get_cfg() + add_dataset_category_config(cfg) + add_densepose_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + # Setup logger for "densepose" module + setup_logger(output=cfg.OUTPUT_DIR, distributed_rank=comm.get_rank(), name="densepose") + return cfg + + +def main(args): + cfg = setup(args) + # disable strict kwargs checking: allow one to specify path handle + # hints through kwargs, like timeout in DP evaluation + PathManager.set_strict_kwargs_checking(False) + + if args.eval_only: + model = Trainer.build_model(cfg) + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + res = Trainer.test(cfg, model) + if cfg.TEST.AUG.ENABLED: + res.update(Trainer.test_with_TTA(cfg, model)) + if comm.is_main_process(): + verify_results(cfg, res) + return res + + trainer = Trainer(cfg) + trainer.resume_or_load(resume=args.resume) + if cfg.TEST.AUG.ENABLED: + trainer.register_hooks( + [hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))] + ) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/README.md new file mode 100644 index 0000000000000000000000000000000000000000..443736fff35cc49e02807a7b941da19c0bdfa666 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/README.md @@ -0,0 +1,135 @@ +# PointRend: Image Segmentation as Rendering + +Alexander Kirillov, Yuxin Wu, Kaiming He, Ross Girshick + +[[`arXiv`](https://arxiv.org/abs/1912.08193)] [[`BibTeX`](#CitingPointRend)] + +
+ +

+ +In this repository, we release code for PointRend in Detectron2. PointRend can be flexibly applied to both instance and semantic segmentation tasks by building on top of existing state-of-the-art models. + +## Installation +Install Detectron 2 following [INSTALL.md](https://github.com/facebookresearch/detectron2/blob/master/INSTALL.md). You are ready to go! + +## Quick start and visualization + +This [Colab Notebook](https://colab.research.google.com/drive/1isGPL5h5_cKoPPhVL9XhMokRtHDvmMVL) tutorial contains examples of PointRend usage and visualizations of its point sampling stages. + +## Training + +To train a model with 8 GPUs run: +```bash +cd /path/to/detectron2/projects/PointRend +python train_net.py --config-file configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_coco.yaml --num-gpus 8 +``` + +## Evaluation + +Model evaluation can be done similarly: +```bash +cd /path/to/detectron2/projects/PointRend +python train_net.py --config-file configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_coco.yaml --eval-only MODEL.WEIGHTS /path/to/model_checkpoint +``` + +# Pretrained Models + +## Instance Segmentation +#### COCO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| Mask head | Backbone | lr sched | Output resolution | mask AP | mask AP* | model id | download |
| :--- | :--- | :---: | :---: | :---: | :---: | :---: | :---: |
| PointRend | R50-FPN | 1× | 224×224 | 36.2 | 39.7 | 164254221 | model \| metrics |
| PointRend | R50-FPN | 3× | 224×224 | 38.3 | 41.6 | 164955410 | model \| metrics |
AP* is COCO mask AP evaluated against the higher-quality LVIS annotations; see the paper for details. Run `python detectron2/datasets/prepare_cocofied_lvis.py` to prepare GT files for AP* evaluation. Since LVIS annotations are not exhaustive, `lvis-api` rather than `cocoapi` should be used to evaluate AP*.

#### Cityscapes
The Cityscapes model is trained with ImageNet pretraining.
| Mask head | Backbone | lr sched | Output resolution | mask AP | model id | download |
| :--- | :--- | :---: | :---: | :---: | :---: | :---: |
| PointRend | R50-FPN | 1× | 224×224 | 35.9 | 164255101 | model \| metrics |
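The Cityscapes model uses the same training entry point as the COCO models; a minimal sketch with the Cityscapes config added in this project (GPU count and paths are illustrative):

```bash
cd /path/to/detectron2/projects/PointRend
python train_net.py --config-file configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_cityscapes.yaml --num-gpus 8
```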
## Semantic Segmentation

#### Cityscapes
The Cityscapes model is trained with ImageNet pretraining.
| Method | Backbone | Output resolution | mIoU | model id | download |
| :--- | :--- | :---: | :---: | :---: | :---: |
| SemanticFPN + PointRend | R101-FPN | 1024×2048 | 78.6 | 186480235 | model \| metrics |
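Evaluation of the semantic segmentation model follows the same pattern as the instance segmentation models; a sketch, assuming the config file shipped with this project and a placeholder checkpoint path:

```bash
cd /path/to/detectron2/projects/PointRend
python train_net.py --config-file configs/SemanticSegmentation/pointrend_semantic_R_101_FPN_1x_cityscapes.yaml --eval-only MODEL.WEIGHTS /path/to/model_checkpoint
```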
+ +## Citing PointRend + +If you use PointRend, please use the following BibTeX entry. + +```BibTeX +@InProceedings{kirillov2019pointrend, + title={{PointRend}: Image Segmentation as Rendering}, + author={Alexander Kirillov and Yuxin Wu and Kaiming He and Ross Girshick}, + journal={ArXiv:1912.08193}, + year={2019} +} +``` diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/Base-PointRend-RCNN-FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/Base-PointRend-RCNN-FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d3917188afe04c7626e539f7c0bc28df4118a290 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/Base-PointRend-RCNN-FPN.yaml @@ -0,0 +1,21 @@ +_BASE_: "../../../../configs/Base-RCNN-FPN.yaml" +MODEL: + ROI_HEADS: + NAME: "PointRendROIHeads" + IN_FEATURES: ["p2", "p3", "p4", "p5"] + ROI_BOX_HEAD: + TRAIN_ON_PRED_BOXES: True + ROI_MASK_HEAD: + NAME: "CoarseMaskHead" + FC_DIM: 1024 + NUM_FC: 2 + OUTPUT_SIDE_RESOLUTION: 7 + IN_FEATURES: ["p2"] + POINT_HEAD_ON: True + POINT_HEAD: + FC_DIM: 256 + NUM_FC: 3 + IN_FEATURES: ["p2"] +INPUT: + # PointRend for instance segmenation does not work with "polygon" mask_format. + MASK_FORMAT: "bitmask" diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_cityscapes.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_cityscapes.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c23dbe1c8463d16f6be110ef49acd8c6142c3aa8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_cityscapes.yaml @@ -0,0 +1,23 @@ +_BASE_: Base-PointRend-RCNN-FPN.yaml +MODEL: + WEIGHTS: detectron2://ImageNetPretrained/MSRA/R-50.pkl + MASK_ON: true + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 8 + POINT_HEAD: + NUM_CLASSES: 8 +DATASETS: + TEST: ("cityscapes_fine_instance_seg_val",) + TRAIN: ("cityscapes_fine_instance_seg_train",) +SOLVER: + BASE_LR: 0.01 + IMS_PER_BATCH: 8 + MAX_ITER: 24000 + STEPS: (18000,) +INPUT: + MAX_SIZE_TEST: 2048 + MAX_SIZE_TRAIN: 2048 + MIN_SIZE_TEST: 1024 + MIN_SIZE_TRAIN: (800, 832, 864, 896, 928, 960, 992, 1024) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_coco.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_coco.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e9fc573bf544de8610a65a7cda2a0df57aec0abf --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_1x_coco.yaml @@ -0,0 +1,9 @@ +_BASE_: Base-PointRend-RCNN-FPN.yaml +MODEL: + WEIGHTS: detectron2://ImageNetPretrained/MSRA/R-50.pkl + MASK_ON: true + RESNETS: + DEPTH: 50 +# To add COCO AP evaluation against the higher-quality LVIS annotations. 
+# DATASETS: +# TEST: ("coco_2017_val", "lvis_v0.5_val_cocofied") diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_coco.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_coco.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2f013f32aeb4122f50c5c4030e9738d9d474ba34 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_coco.yaml @@ -0,0 +1,13 @@ +_BASE_: Base-PointRend-RCNN-FPN.yaml +MODEL: + WEIGHTS: detectron2://ImageNetPretrained/MSRA/R-50.pkl + MASK_ON: true + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 +# To add COCO AP evaluation against the higher-quality LVIS annotations. +# DATASETS: +# TEST: ("coco_2017_val", "lvis_v0.5_val_cocofied") + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_parsing.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_parsing.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a4af81dab7b47371454a273ecf962ea47ac21d49 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_parsing.yaml @@ -0,0 +1,20 @@ +_BASE_: Base-PointRend-RCNN-FPN.yaml +MODEL: + WEIGHTS: detectron2://ImageNetPretrained/MSRA/R-50.pkl + MASK_ON: true + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 1 + POINT_HEAD: + NUM_CLASSES: 1 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 + IMS_PER_BATCH: 1 +# To add COCO AP evaluation against the higher-quality LVIS annotations. +# DATASETS: +# TEST: ("coco_2017_val", "lvis_v0.5_val_cocofied") +DATASETS: + TRAIN: ("CIHP_train",) + TEST: ("CIHP_val",) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_X_101_32x8d_FPN_3x_parsing.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_X_101_32x8d_FPN_3x_parsing.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8e52d82e39400f08f86a6e1a92e3e1c471403624 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/InstanceSegmentation/pointrend_rcnn_X_101_32x8d_FPN_3x_parsing.yaml @@ -0,0 +1,28 @@ +_BASE_: Base-PointRend-RCNN-FPN.yaml +MODEL: + WEIGHTS: "./X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + MASK_ON: true + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 + ROI_HEADS: + NUM_CLASSES: 1 + POINT_HEAD: + NUM_CLASSES: 1 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 + IMS_PER_BATCH: 1 +# To add COCO AP evaluation against the higher-quality LVIS annotations. 
+# DATASETS: +# TEST: ("coco_2017_val", "lvis_v0.5_val_cocofied") +INPUT: + MIN_SIZE_TRAIN: (640, 864) + MIN_SIZE_TRAIN_SAMPLING: "range" + MAX_SIZE_TRAIN: 1440 +DATASETS: + TRAIN: ("CIHP_train",) + TEST: ("CIHP_val",) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/Base-PointRend-Semantic-FPN.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/Base-PointRend-Semantic-FPN.yaml new file mode 100644 index 0000000000000000000000000000000000000000..00562a92363dc47c6ebe9ef8bebb89cd5e5b8502 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/Base-PointRend-Semantic-FPN.yaml @@ -0,0 +1,19 @@ +_BASE_: "../../../../configs/Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "SemanticSegmentor" + BACKBONE: + FREEZE_AT: 0 + SEM_SEG_HEAD: + NAME: "PointRendSemSegHead" + POINT_HEAD: + NUM_CLASSES: 54 + FC_DIM: 256 + NUM_FC: 3 + IN_FEATURES: ["p2"] + TRAIN_NUM_POINTS: 1024 + SUBDIVISION_STEPS: 2 + SUBDIVISION_NUM_POINTS: 8192 + COARSE_SEM_SEG_HEAD_NAME: "SemSegFPNHead" +DATASETS: + TRAIN: ("coco_2017_train_panoptic_stuffonly",) + TEST: ("coco_2017_val_panoptic_stuffonly",) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/pointrend_semantic_R_101_FPN_1x_cityscapes.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/pointrend_semantic_R_101_FPN_1x_cityscapes.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4965b068c11bc568317ea3cc8c83d8c44234b936 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/pointrend_semantic_R_101_FPN_1x_cityscapes.yaml @@ -0,0 +1,33 @@ +_BASE_: Base-PointRend-Semantic-FPN.yaml +MODEL: + WEIGHTS: detectron2://ImageNetPretrained/MSRA/R-101.pkl + RESNETS: + DEPTH: 101 + SEM_SEG_HEAD: + NUM_CLASSES: 19 + POINT_HEAD: + NUM_CLASSES: 19 + TRAIN_NUM_POINTS: 2048 + SUBDIVISION_NUM_POINTS: 8192 +DATASETS: + TRAIN: ("cityscapes_fine_sem_seg_train",) + TEST: ("cityscapes_fine_sem_seg_val",) +SOLVER: + BASE_LR: 0.01 + STEPS: (40000, 55000) + MAX_ITER: 65000 + IMS_PER_BATCH: 32 +INPUT: + MIN_SIZE_TRAIN: (512, 768, 1024, 1280, 1536, 1792, 2048) + MIN_SIZE_TRAIN_SAMPLING: "choice" + MIN_SIZE_TEST: 1024 + MAX_SIZE_TRAIN: 4096 + MAX_SIZE_TEST: 2048 + CROP: + ENABLED: True + TYPE: "absolute" + SIZE: (512, 1024) + SINGLE_CATEGORY_MAX_AREA: 0.75 + COLOR_AUG_SSD: True +DATALOADER: + NUM_WORKERS: 16 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/pointrend_semantic_R_50_FPN_1x_coco.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/pointrend_semantic_R_50_FPN_1x_coco.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7948bd808ea9888b20d1e118abf6bb630c485f39 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/configs/SemanticSegmentation/pointrend_semantic_R_50_FPN_1x_coco.yaml @@ -0,0 +1,5 @@ +_BASE_: Base-PointRend-Semantic-FPN.yaml +MODEL: + WEIGHTS: detectron2://ImageNetPretrained/MSRA/R-50.pkl + RESNETS: + DEPTH: 50 diff --git 
a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/finetune_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/finetune_net.py new file mode 100755 index 0000000000000000000000000000000000000000..b99baf939b3788a2ee9e339beaa503cfa4d6a14f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/finetune_net.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +PointRend Training Script. + +This script is a simplified version of the training script in detectron2/tools. +""" + +import os +import torch + +import detectron2.utils.comm as comm +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import MetadataCatalog, build_detection_train_loader +from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, launch +from detectron2.evaluation import ( + CityscapesInstanceEvaluator, + CityscapesSemSegEvaluator, + COCOEvaluator, + DatasetEvaluators, + LVISEvaluator, + SemSegEvaluator, + verify_results, +) + +from point_rend import SemSegDatasetMapper, add_pointrend_config + +os.environ['CUDA_VISIBLE_DEVICES'] = '4' +# Register Custom Dataset +from detectron2.data.datasets import register_coco_instances +register_coco_instances("CIHP_train", {}, "/data03/v_xuyunqiu/multi_parsing/data/msrcnn_finetune_annotations/CIHP_train.json", "/data03/v_xuyunqiu/data/instance-level_human_parsing/Training/Images") +register_coco_instances("CIHP_val", {}, "/data03/v_xuyunqiu/multi_parsing/data/msrcnn_finetune_annotations/CIHP_val.json", "/data03/v_xuyunqiu/data/instance-level_human_parsing/Validation/Images") + + +class Trainer(DefaultTrainer): + """ + We use the "DefaultTrainer" which contains a number pre-defined logic for + standard training workflow. They may not work for you, especially if you + are working on a new research project. In that case you can use the cleaner + "SimpleTrainer", or write your own training loop. + """ + + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_folder=None): + """ + Create evaluator(s) for a given dataset. + This uses the special metadata "evaluator_type" associated with each builtin dataset. + For your own dataset, you can simply create an evaluator manually in your + script and do not have to worry about the hacky if-else logic here. + """ + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + evaluator_list = [] + evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type + if evaluator_type == "lvis": + return LVISEvaluator(dataset_name, cfg, True, output_folder) + if evaluator_type == "coco": + return COCOEvaluator(dataset_name, cfg, True, output_folder) + if evaluator_type == "sem_seg": + return SemSegEvaluator( + dataset_name, + distributed=True, + num_classes=cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES, + ignore_label=cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE, + output_dir=output_folder, + ) + if evaluator_type == "cityscapes_instance": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." + return CityscapesInstanceEvaluator(dataset_name) + if evaluator_type == "cityscapes_sem_seg": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." 
+ return CityscapesSemSegEvaluator(dataset_name) + if len(evaluator_list) == 0: + raise NotImplementedError( + "no Evaluator for the dataset {} with the type {}".format( + dataset_name, evaluator_type + ) + ) + if len(evaluator_list) == 1: + return evaluator_list[0] + return DatasetEvaluators(evaluator_list) + + @classmethod + def build_train_loader(cls, cfg): + if "SemanticSegmentor" in cfg.MODEL.META_ARCHITECTURE: + mapper = SemSegDatasetMapper(cfg, True) + else: + mapper = None + return build_detection_train_loader(cfg, mapper=mapper) + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg() + add_pointrend_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + model = Trainer.build_model(cfg) + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + res = Trainer.test(cfg, model) + if comm.is_main_process(): + verify_results(cfg, res) + return res + + trainer = Trainer(cfg) + trainer.resume_or_load(resume=args.resume) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4020fe0a287f87cb3bd2487b5b40b7e1e2647aa8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .config import add_pointrend_config +from .coarse_mask_head import CoarseMaskHead +from .roi_heads import PointRendROIHeads +from .dataset_mapper import SemSegDatasetMapper +from .semantic_seg import PointRendSemSegHead diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/coarse_mask_head.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/coarse_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..3f1cffb4c985dc3121a863eb7b378965b718a19d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/coarse_mask_head.py @@ -0,0 +1,92 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import fvcore.nn.weight_init as weight_init +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.layers import Conv2d, ShapeSpec +from detectron2.modeling import ROI_MASK_HEAD_REGISTRY + + +@ROI_MASK_HEAD_REGISTRY.register() +class CoarseMaskHead(nn.Module): + """ + A mask head with fully connected layers. Given pooled features it first reduces channels and + spatial dimensions with conv layers and then uses FC layers to predict coarse masks analogously + to the standard box head. 
+ """ + + def __init__(self, cfg, input_shape: ShapeSpec): + """ + The following attributes are parsed from config: + conv_dim: the output dimension of the conv layers + fc_dim: the feature dimenstion of the FC layers + num_fc: the number of FC layers + output_side_resolution: side resolution of the output square mask prediction + """ + super(CoarseMaskHead, self).__init__() + + # fmt: off + self.num_classes = cfg.MODEL.ROI_HEADS.NUM_CLASSES + conv_dim = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM + self.fc_dim = cfg.MODEL.ROI_MASK_HEAD.FC_DIM + num_fc = cfg.MODEL.ROI_MASK_HEAD.NUM_FC + self.output_side_resolution = cfg.MODEL.ROI_MASK_HEAD.OUTPUT_SIDE_RESOLUTION + self.input_channels = input_shape.channels + self.input_h = input_shape.height + self.input_w = input_shape.width + # fmt: on + + self.conv_layers = [] + if self.input_channels > conv_dim: + self.reduce_channel_dim_conv = Conv2d( + self.input_channels, + conv_dim, + kernel_size=1, + stride=1, + padding=0, + bias=True, + activation=F.relu, + ) + self.conv_layers.append(self.reduce_channel_dim_conv) + + self.reduce_spatial_dim_conv = Conv2d( + conv_dim, conv_dim, kernel_size=2, stride=2, padding=0, bias=True, activation=F.relu + ) + self.conv_layers.append(self.reduce_spatial_dim_conv) + + input_dim = conv_dim * self.input_h * self.input_w + input_dim //= 4 + + self.fcs = [] + for k in range(num_fc): + fc = nn.Linear(input_dim, self.fc_dim) + self.add_module("coarse_mask_fc{}".format(k + 1), fc) + self.fcs.append(fc) + input_dim = self.fc_dim + + output_dim = self.num_classes * self.output_side_resolution * self.output_side_resolution + + self.prediction = nn.Linear(self.fc_dim, output_dim) + # use normal distribution initialization for mask prediction layer + nn.init.normal_(self.prediction.weight, std=0.001) + nn.init.constant_(self.prediction.bias, 0) + + for layer in self.conv_layers: + weight_init.c2_msra_fill(layer) + for layer in self.fcs: + weight_init.c2_xavier_fill(layer) + + def forward(self, x): + # unlike BaseMaskRCNNHead, this head only outputs intermediate + # features, because the features will be used later by PointHead. + N = x.shape[0] + x = x.view(N, self.input_channels, self.input_h, self.input_w) + for layer in self.conv_layers: + x = layer(x) + x = torch.flatten(x, start_dim=1) + for layer in self.fcs: + x = F.relu(layer(x)) + return self.prediction(x).view( + N, self.num_classes, self.output_side_resolution, self.output_side_resolution + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/color_augmentation.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/color_augmentation.py new file mode 100644 index 0000000000000000000000000000000000000000..27344c470adac143186e61c8a5b0f39900937634 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/color_augmentation.py @@ -0,0 +1,98 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +import random +import cv2 +from fvcore.transforms.transform import Transform + + +class ColorAugSSDTransform(Transform): + """ + A color related data augmentation used in Single Shot Multibox Detector (SSD). + + Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy, + Scott Reed, Cheng-Yang Fu, Alexander C. Berg. + SSD: Single Shot MultiBox Detector. ECCV 2016. 
+ + Implementation based on: + + https://github.com/weiliu89/caffe/blob + /4817bf8b4200b35ada8ed0dc378dceaf38c539e4 + /src/caffe/util/im_transforms.cpp + + https://github.com/chainer/chainercv/blob + /7159616642e0be7c5b3ef380b848e16b7e99355b/chainercv + /links/model/ssd/transforms.py + """ + + def __init__( + self, + img_format, + brightness_delta=32, + contrast_low=0.5, + contrast_high=1.5, + saturation_low=0.5, + saturation_high=1.5, + hue_delta=18, + ): + super().__init__() + assert img_format in ["BGR", "RGB"] + self.is_rgb = img_format == "RGB" + del img_format + self._set_attributes(locals()) + + def apply_coords(self, coords): + return coords + + def apply_segmentation(self, segmentation): + return segmentation + + def apply_image(self, img, interp=None): + if self.is_rgb: + img = img[:, :, [2, 1, 0]] + img = self.brightness(img) + if random.randrange(2): + img = self.contrast(img) + img = self.saturation(img) + img = self.hue(img) + else: + img = self.saturation(img) + img = self.hue(img) + img = self.contrast(img) + if self.is_rgb: + img = img[:, :, [2, 1, 0]] + return img + + def convert(self, img, alpha=1, beta=0): + img = img.astype(np.float32) * alpha + beta + img = np.clip(img, 0, 255) + return img.astype(np.uint8) + + def brightness(self, img): + if random.randrange(2): + return self.convert( + img, beta=random.uniform(-self.brightness_delta, self.brightness_delta) + ) + return img + + def contrast(self, img): + if random.randrange(2): + return self.convert(img, alpha=random.uniform(self.contrast_low, self.contrast_high)) + return img + + def saturation(self, img): + if random.randrange(2): + img = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) + img[:, :, 1] = self.convert( + img[:, :, 1], alpha=random.uniform(self.saturation_low, self.saturation_high) + ) + return cv2.cvtColor(img, cv2.COLOR_HSV2BGR) + return img + + def hue(self, img): + if random.randrange(2): + img = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) + img[:, :, 0] = ( + img[:, :, 0].astype(int) + random.randint(-self.hue_delta, self.hue_delta) + ) % 180 + return cv2.cvtColor(img, cv2.COLOR_HSV2BGR) + return img diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/config.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/config.py new file mode 100644 index 0000000000000000000000000000000000000000..74f63672bba7cd25679054b19ff87254a0e24974 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/config.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from detectron2.config import CfgNode as CN + + +def add_pointrend_config(cfg): + """ + Add config for PointRend. + """ + # We retry random cropping until no single category in semantic segmentation GT occupies more + # than `SINGLE_CATEGORY_MAX_AREA` part of the crop. + cfg.INPUT.CROP.SINGLE_CATEGORY_MAX_AREA = 1.0 + # Color augmentatition from SSD paper for semantic segmentation model during training. + cfg.INPUT.COLOR_AUG_SSD = False + + # Names of the input feature maps to be used by a coarse mask head. + cfg.MODEL.ROI_MASK_HEAD.IN_FEATURES = ("p2",) + cfg.MODEL.ROI_MASK_HEAD.FC_DIM = 1024 + cfg.MODEL.ROI_MASK_HEAD.NUM_FC = 2 + # The side size of a coarse mask head prediction. + cfg.MODEL.ROI_MASK_HEAD.OUTPUT_SIDE_RESOLUTION = 7 + # True if point head is used. 
+ cfg.MODEL.ROI_MASK_HEAD.POINT_HEAD_ON = False + + cfg.MODEL.POINT_HEAD = CN() + cfg.MODEL.POINT_HEAD.NAME = "StandardPointHead" + cfg.MODEL.POINT_HEAD.NUM_CLASSES = 80 + # Names of the input feature maps to be used by a mask point head. + cfg.MODEL.POINT_HEAD.IN_FEATURES = ("p2",) + # Number of points sampled during training for a mask point head. + cfg.MODEL.POINT_HEAD.TRAIN_NUM_POINTS = 14 * 14 + # Oversampling parameter for PointRend point sampling during training. Parameter `k` in the + # original paper. + cfg.MODEL.POINT_HEAD.OVERSAMPLE_RATIO = 3 + # Importance sampling parameter for PointRend point sampling during training. Parametr `beta` in + # the original paper. + cfg.MODEL.POINT_HEAD.IMPORTANCE_SAMPLE_RATIO = 0.75 + # Number of subdivision steps during inference. + cfg.MODEL.POINT_HEAD.SUBDIVISION_STEPS = 5 + # Maximum number of points selected at each subdivision step (N). + cfg.MODEL.POINT_HEAD.SUBDIVISION_NUM_POINTS = 28 * 28 + cfg.MODEL.POINT_HEAD.FC_DIM = 256 + cfg.MODEL.POINT_HEAD.NUM_FC = 3 + cfg.MODEL.POINT_HEAD.CLS_AGNOSTIC_MASK = False + # If True, then coarse prediction features are used as inout for each layer in PointRend's MLP. + cfg.MODEL.POINT_HEAD.COARSE_PRED_EACH_LAYER = True + cfg.MODEL.POINT_HEAD.COARSE_SEM_SEG_HEAD_NAME = "SemSegFPNHead" diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/dataset_mapper.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/dataset_mapper.py new file mode 100644 index 0000000000000000000000000000000000000000..76b64ee79b679741d547c5d1ffca55ac756051ae --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/dataset_mapper.py @@ -0,0 +1,121 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import logging +import numpy as np +import torch +from fvcore.common.file_io import PathManager +from fvcore.transforms.transform import CropTransform +from PIL import Image + +from detectron2.data import detection_utils as utils +from detectron2.data import transforms as T + +from .color_augmentation import ColorAugSSDTransform + +""" +This file contains the mapping that's applied to "dataset dicts" for semantic segmentation models. +Unlike the default DatasetMapper this mapper uses cropping as the last transformation. +""" + +__all__ = ["SemSegDatasetMapper"] + + +class SemSegDatasetMapper: + """ + A callable which takes a dataset dict in Detectron2 Dataset format, + and map it into a format used by semantic segmentation models. + + The callable currently does the following: + + 1. Read the image from "file_name" + 2. Applies geometric transforms to the image and annotation + 3. Find and applies suitable cropping to the image and annotation + 4. 
Prepare image and annotation to Tensors + """ + + def __init__(self, cfg, is_train=True): + if cfg.INPUT.CROP.ENABLED and is_train: + self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE) + logging.getLogger(__name__).info("CropGen used in training: " + str(self.crop_gen)) + else: + self.crop_gen = None + + self.tfm_gens = utils.build_transform_gen(cfg, is_train) + + if cfg.INPUT.COLOR_AUG_SSD: + self.tfm_gens.append(ColorAugSSDTransform(img_format=cfg.INPUT.FORMAT)) + logging.getLogger(__name__).info( + "Color augmnetation used in training: " + str(self.tfm_gens[-1]) + ) + + # fmt: off + self.img_format = cfg.INPUT.FORMAT + self.single_category_max_area = cfg.INPUT.CROP.SINGLE_CATEGORY_MAX_AREA + self.ignore_value = cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE + # fmt: on + + self.is_train = is_train + + def __call__(self, dataset_dict): + """ + Args: + dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format. + + Returns: + dict: a format that builtin models in detectron2 accept + """ + dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below + image = utils.read_image(dataset_dict["file_name"], format=self.img_format) + utils.check_image_size(dataset_dict, image) + assert "sem_seg_file_name" in dataset_dict + + image, transforms = T.apply_transform_gens(self.tfm_gens, image) + if self.is_train: + with PathManager.open(dataset_dict.pop("sem_seg_file_name"), "rb") as f: + sem_seg_gt = Image.open(f) + sem_seg_gt = np.asarray(sem_seg_gt, dtype="uint8") + sem_seg_gt = transforms.apply_segmentation(sem_seg_gt) + if self.crop_gen: + image, sem_seg_gt = crop_transform( + image, + sem_seg_gt, + self.crop_gen, + self.single_category_max_area, + self.ignore_value, + ) + dataset_dict["sem_seg"] = torch.as_tensor(sem_seg_gt.astype("long")) + + # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory, + # but not efficient on large generic data structures due to the use of pickle & mp.Queue. + # Therefore it's important to use torch.Tensor. + dataset_dict["image"] = torch.as_tensor(np.ascontiguousarray(image.transpose(2, 0, 1))) + + if not self.is_train: + dataset_dict.pop("sem_seg_file_name", None) + return dataset_dict + + return dataset_dict + + +def crop_transform(image, sem_seg, crop_gen, single_category_max_area, ignore_value): + """ + Find a cropping window such that no single category occupies more than + `single_category_max_area` in `sem_seg`. The function retries random cropping 10 times max. 
+ """ + if single_category_max_area >= 1.0: + crop_tfm = crop_gen.get_transform(image) + sem_seg_temp = crop_tfm.apply_segmentation(sem_seg) + else: + h, w = sem_seg.shape + crop_size = crop_gen.get_crop_size((h, w)) + for _ in range(10): + y0 = np.random.randint(h - crop_size[0] + 1) + x0 = np.random.randint(w - crop_size[1] + 1) + sem_seg_temp = sem_seg[y0 : y0 + crop_size[0], x0 : x0 + crop_size[1]] + labels, cnt = np.unique(sem_seg_temp, return_counts=True) + cnt = cnt[labels != ignore_value] + if len(cnt) > 1 and np.max(cnt) / np.sum(cnt) < single_category_max_area: + break + crop_tfm = CropTransform(x0, y0, crop_size[1], crop_size[0]) + image = crop_tfm.apply_image(image) + return image, sem_seg_temp diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/point_features.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/point_features.py new file mode 100644 index 0000000000000000000000000000000000000000..320a33de8505572eedcfa94d355bf2772ab75528 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/point_features.py @@ -0,0 +1,216 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import torch +from torch.nn import functional as F + +from detectron2.layers import cat +from detectron2.structures import Boxes + + +""" +Shape shorthand in this module: + + N: minibatch dimension size, i.e. the number of RoIs for instance segmenation or the + number of images for semantic segmenation. + R: number of ROIs, combined over all images, in the minibatch + P: number of points +""" + + +def point_sample(input, point_coords, **kwargs): + """ + A wrapper around :function:`torch.nn.functional.grid_sample` to support 3D point_coords tensors. + Unlike :function:`torch.nn.functional.grid_sample` it assumes `point_coords` to lie inside + [0, 1] x [0, 1] square. + + Args: + input (Tensor): A tensor of shape (N, C, H, W) that contains features map on a H x W grid. + point_coords (Tensor): A tensor of shape (N, P, 2) or (N, Hgrid, Wgrid, 2) that contains + [0, 1] x [0, 1] normalized point coordinates. + + Returns: + output (Tensor): A tensor of shape (N, C, P) or (N, C, Hgrid, Wgrid) that contains + features for points in `point_coords`. The features are obtained via bilinear + interplation from `input` the same way as :function:`torch.nn.functional.grid_sample`. + """ + add_dim = False + if point_coords.dim() == 3: + add_dim = True + point_coords = point_coords.unsqueeze(2) + output = F.grid_sample(input, 2.0 * point_coords - 1.0, **kwargs) + if add_dim: + output = output.squeeze(3) + return output + + +def generate_regular_grid_point_coords(R, side_size, device): + """ + Generate regular square grid of points in [0, 1] x [0, 1] coordinate space. + + Args: + R (int): The number of grids to sample, one for each region. + side_size (int): The side size of the regular grid. + device (torch.device): Desired device of returned tensor. + + Returns: + (Tensor): A tensor of shape (R, side_size^2, 2) that contains coordinates + for the regular grids. 
+ """ + aff = torch.tensor([[[0.5, 0, 0.5], [0, 0.5, 0.5]]], device=device) + r = F.affine_grid(aff, torch.Size((1, 1, side_size, side_size)), align_corners=False) + return r.view(1, -1, 2).expand(R, -1, -1) + + +def get_uncertain_point_coords_with_randomness( + coarse_logits, uncertainty_func, num_points, oversample_ratio, importance_sample_ratio +): + """ + Sample points in [0, 1] x [0, 1] coordinate space based on their uncertainty. The unceratinties + are calculated for each point using 'uncertainty_func' function that takes point's logit + prediction as input. + See PointRend paper for details. + + Args: + coarse_logits (Tensor): A tensor of shape (N, C, Hmask, Wmask) or (N, 1, Hmask, Wmask) for + class-specific or class-agnostic prediction. + uncertainty_func: A function that takes a Tensor of shape (N, C, P) or (N, 1, P) that + contains logit predictions for P points and returns their uncertainties as a Tensor of + shape (N, 1, P). + num_points (int): The number of points P to sample. + oversample_ratio (int): Oversampling parameter. + importance_sample_ratio (float): Ratio of points that are sampled via importnace sampling. + + Returns: + point_coords (Tensor): A tensor of shape (N, P, 2) that contains the coordinates of P + sampled points. + """ + assert oversample_ratio >= 1 + assert importance_sample_ratio <= 1 and importance_sample_ratio >= 0 + num_boxes = coarse_logits.shape[0] + num_sampled = int(num_points * oversample_ratio) + point_coords = torch.rand(num_boxes, num_sampled, 2, device=coarse_logits.device) + point_logits = point_sample(coarse_logits, point_coords, align_corners=False) + # It is crucial to calculate uncertainty based on the sampled prediction value for the points. + # Calculating uncertainties of the coarse predictions first and sampling them for points leads + # to incorrect results. + # To illustrate this: assume uncertainty_func(logits)=-abs(logits), a sampled point between + # two coarse predictions with -1 and 1 logits has 0 logits, and therefore 0 uncertainty value. + # However, if we calculate uncertainties for the coarse predictions first, + # both will have -1 uncertainty, and the sampled point will get -1 uncertainty. + point_uncertainties = uncertainty_func(point_logits) + num_uncertain_points = int(importance_sample_ratio * num_points) + num_random_points = num_points - num_uncertain_points + idx = torch.topk(point_uncertainties[:, 0, :], k=num_uncertain_points, dim=1)[1] + shift = num_sampled * torch.arange(num_boxes, dtype=torch.long, device=coarse_logits.device) + idx += shift[:, None] + point_coords = point_coords.view(-1, 2)[idx.view(-1), :].view( + num_boxes, num_uncertain_points, 2 + ) + if num_random_points > 0: + point_coords = cat( + [ + point_coords, + torch.rand(num_boxes, num_random_points, 2, device=coarse_logits.device), + ], + dim=1, + ) + return point_coords + + +def get_uncertain_point_coords_on_grid(uncertainty_map, num_points): + """ + Find `num_points` most uncertain points from `uncertainty_map` grid. + + Args: + uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty + values for a set of points on a regular H x W grid. + num_points (int): The number of points P to select. + + Returns: + point_indices (Tensor): A tensor of shape (N, P) that contains indices from + [0, H x W) of the most uncertain points. + point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized + coordinates of the most uncertain points from the H x W grid. 
+ """ + R, _, H, W = uncertainty_map.shape + h_step = 1.0 / float(H) + w_step = 1.0 / float(W) + + num_points = min(H * W, num_points) + point_indices = torch.topk(uncertainty_map.view(R, H * W), k=num_points, dim=1)[1] + point_coords = torch.zeros(R, num_points, 2, dtype=torch.float, device=uncertainty_map.device) + point_coords[:, :, 0] = w_step / 2.0 + (point_indices % W).to(torch.float) * w_step + point_coords[:, :, 1] = h_step / 2.0 + (point_indices // W).to(torch.float) * h_step + return point_indices, point_coords + + +def point_sample_fine_grained_features(features_list, feature_scales, boxes, point_coords): + """ + Get features from feature maps in `features_list` that correspond to specific point coordinates + inside each bounding box from `boxes`. + + Args: + features_list (list[Tensor]): A list of feature map tensors to get features from. + feature_scales (list[float]): A list of scales for tensors in `features_list`. + boxes (list[Boxes]): A list of I Boxes objects that contain R_1 + ... + R_I = R boxes all + together. + point_coords (Tensor): A tensor of shape (R, P, 2) that contains + [0, 1] x [0, 1] box-normalized coordinates of the P sampled points. + + Returns: + point_features (Tensor): A tensor of shape (R, C, P) that contains features sampled + from all features maps in feature_list for P sampled points for all R boxes in `boxes`. + point_coords_wrt_image (Tensor): A tensor of shape (R, P, 2) that contains image-level + coordinates of P points. + """ + cat_boxes = Boxes.cat(boxes) + num_boxes = [len(b) for b in boxes] + + point_coords_wrt_image = get_point_coords_wrt_image(cat_boxes.tensor, point_coords) + split_point_coords_wrt_image = torch.split(point_coords_wrt_image, num_boxes) + + point_features = [] + for idx_img, point_coords_wrt_image_per_image in enumerate(split_point_coords_wrt_image): + point_features_per_image = [] + for idx_feature, feature_map in enumerate(features_list): + h, w = feature_map.shape[-2:] + scale = torch.tensor([w, h], device=feature_map.device) / feature_scales[idx_feature] + point_coords_scaled = point_coords_wrt_image_per_image / scale + point_features_per_image.append( + point_sample( + feature_map[idx_img].unsqueeze(0), + point_coords_scaled.unsqueeze(0), + align_corners=False, + ) + .squeeze(0) + .transpose(1, 0) + ) + point_features.append(cat(point_features_per_image, dim=1)) + + return cat(point_features, dim=0), point_coords_wrt_image + + +def get_point_coords_wrt_image(boxes_coords, point_coords): + """ + Convert box-normalized [0, 1] x [0, 1] point cooordinates to image-level coordinates. + + Args: + boxes_coords (Tensor): A tensor of shape (R, 4) that contains bounding boxes. + coordinates. + point_coords (Tensor): A tensor of shape (R, P, 2) that contains + [0, 1] x [0, 1] box-normalized coordinates of the P sampled points. + + Returns: + point_coords_wrt_image (Tensor): A tensor of shape (R, P, 2) that contains + image-normalized coordinates of P sampled points. 
+ """ + with torch.no_grad(): + point_coords_wrt_image = point_coords.clone() + point_coords_wrt_image[:, :, 0] = point_coords_wrt_image[:, :, 0] * ( + boxes_coords[:, None, 2] - boxes_coords[:, None, 0] + ) + point_coords_wrt_image[:, :, 1] = point_coords_wrt_image[:, :, 1] * ( + boxes_coords[:, None, 3] - boxes_coords[:, None, 1] + ) + point_coords_wrt_image[:, :, 0] += boxes_coords[:, None, 0] + point_coords_wrt_image[:, :, 1] += boxes_coords[:, None, 1] + return point_coords_wrt_image diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/point_head.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/point_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6f35baea064fbee14d9bcd0b57e354f82bf54a8c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/point_head.py @@ -0,0 +1,154 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import fvcore.nn.weight_init as weight_init +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.layers import ShapeSpec, cat +from detectron2.structures import BitMasks +from detectron2.utils.events import get_event_storage +from detectron2.utils.registry import Registry + +from .point_features import point_sample + +POINT_HEAD_REGISTRY = Registry("POINT_HEAD") +POINT_HEAD_REGISTRY.__doc__ = """ +Registry for point heads, which makes prediction for a given set of per-point features. + +The registered object will be called with `obj(cfg, input_shape)`. +""" + + +def roi_mask_point_loss(mask_logits, instances, points_coord): + """ + Compute the point-based loss for instance segmentation mask predictions. + + Args: + mask_logits (Tensor): A tensor of shape (R, C, P) or (R, 1, P) for class-specific or + class-agnostic, where R is the total number of predicted masks in all images, C is the + number of foreground classes, and P is the number of points sampled for each mask. + The values are logits. + instances (list[Instances]): A list of N Instances, where N is the number of images + in the batch. These instances are in 1:1 correspondence with the `mask_logits`. So, i_th + elememt of the list contains R_i objects and R_1 + ... + R_N is equal to R. + The ground-truth labels (class, box, mask, ...) associated with each instance are stored + in fields. + points_coords (Tensor): A tensor of shape (R, P, 2), where R is the total number of + predicted masks and P is the number of points for each mask. The coordinates are in + the image pixel coordinate space, i.e. [0, H] x [0, W]. + Returns: + point_loss (Tensor): A scalar tensor containing the loss. + """ + assert len(instances) == 0 or isinstance( + instances[0].gt_masks, BitMasks + ), "Point head works with GT in 'bitmask' format only. Set INPUT.MASK_FORMAT to 'bitmask'." 
+ with torch.no_grad(): + cls_agnostic_mask = mask_logits.size(1) == 1 + total_num_masks = mask_logits.size(0) + + gt_classes = [] + gt_mask_logits = [] + idx = 0 + for instances_per_image in instances: + if not cls_agnostic_mask: + gt_classes_per_image = instances_per_image.gt_classes.to(dtype=torch.int64) + gt_classes.append(gt_classes_per_image) + + gt_bit_masks = instances_per_image.gt_masks.tensor + h, w = instances_per_image.gt_masks.image_size + scale = torch.tensor([w, h], dtype=torch.float, device=gt_bit_masks.device) + points_coord_grid_sample_format = ( + points_coord[idx : idx + len(instances_per_image)] / scale + ) + idx += len(instances_per_image) + gt_mask_logits.append( + point_sample( + gt_bit_masks.to(torch.float32).unsqueeze(1), + points_coord_grid_sample_format, + align_corners=False, + ).squeeze(1) + ) + gt_mask_logits = cat(gt_mask_logits) + + # torch.mean (in binary_cross_entropy_with_logits) doesn't + # accept empty tensors, so handle it separately + if gt_mask_logits.numel() == 0: + return mask_logits.sum() * 0 + + if cls_agnostic_mask: + mask_logits = mask_logits[:, 0] + else: + indices = torch.arange(total_num_masks) + gt_classes = cat(gt_classes, dim=0) + mask_logits = mask_logits[indices, gt_classes] + + # Log the training accuracy (using gt classes and 0.0 threshold for the logits) + mask_accurate = (mask_logits > 0.0) == gt_mask_logits.to(dtype=torch.uint8) + mask_accuracy = mask_accurate.nonzero().size(0) / mask_accurate.numel() + get_event_storage().put_scalar("point_rend/accuracy", mask_accuracy) + + point_loss = F.binary_cross_entropy_with_logits( + mask_logits, gt_mask_logits.to(dtype=torch.float32), reduction="mean" + ) + return point_loss + + +@POINT_HEAD_REGISTRY.register() +class StandardPointHead(nn.Module): + """ + A point head multi-layer perceptron which we model with conv1d layers with kernel 1. The head + takes both fine-grained and coarse prediction features as its input. 
+ """ + + def __init__(self, cfg, input_shape: ShapeSpec): + """ + The following attributes are parsed from config: + fc_dim: the output dimension of each FC layers + num_fc: the number of FC layers + coarse_pred_each_layer: if True, coarse prediction features are concatenated to each + layer's input + """ + super(StandardPointHead, self).__init__() + # fmt: off + num_classes = cfg.MODEL.POINT_HEAD.NUM_CLASSES + fc_dim = cfg.MODEL.POINT_HEAD.FC_DIM + num_fc = cfg.MODEL.POINT_HEAD.NUM_FC + cls_agnostic_mask = cfg.MODEL.POINT_HEAD.CLS_AGNOSTIC_MASK + self.coarse_pred_each_layer = cfg.MODEL.POINT_HEAD.COARSE_PRED_EACH_LAYER + input_channels = input_shape.channels + # fmt: on + + fc_dim_in = input_channels + num_classes + self.fc_layers = [] + for k in range(num_fc): + fc = nn.Conv1d(fc_dim_in, fc_dim, kernel_size=1, stride=1, padding=0, bias=True) + self.add_module("fc{}".format(k + 1), fc) + self.fc_layers.append(fc) + fc_dim_in = fc_dim + fc_dim_in += num_classes if self.coarse_pred_each_layer else 0 + + num_mask_classes = 1 if cls_agnostic_mask else num_classes + self.predictor = nn.Conv1d(fc_dim_in, num_mask_classes, kernel_size=1, stride=1, padding=0) + + for layer in self.fc_layers: + weight_init.c2_msra_fill(layer) + # use normal distribution initialization for mask prediction layer + nn.init.normal_(self.predictor.weight, std=0.001) + if self.predictor.bias is not None: + nn.init.constant_(self.predictor.bias, 0) + + def forward(self, fine_grained_features, coarse_features): + x = torch.cat((fine_grained_features, coarse_features), dim=1) + for layer in self.fc_layers: + x = F.relu(layer(x)) + if self.coarse_pred_each_layer: + x = cat((x, coarse_features), dim=1) + return self.predictor(x) + + +def build_point_head(cfg, input_channels): + """ + Build a point head defined by `cfg.MODEL.POINT_HEAD.NAME`. + """ + head_name = cfg.MODEL.POINT_HEAD.NAME + return POINT_HEAD_REGISTRY.get(head_name)(cfg, input_channels) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/roi_heads.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/roi_heads.py new file mode 100644 index 0000000000000000000000000000000000000000..4f7225bf10544461bbe1e3c777863557f2ad5808 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/roi_heads.py @@ -0,0 +1,227 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +import torch + +from detectron2.layers import ShapeSpec, cat, interpolate +from detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads +from detectron2.modeling.roi_heads.mask_head import ( + build_mask_head, + mask_rcnn_inference, + mask_rcnn_loss, +) +from detectron2.modeling.roi_heads.roi_heads import select_foreground_proposals + +from .point_features import ( + generate_regular_grid_point_coords, + get_uncertain_point_coords_on_grid, + get_uncertain_point_coords_with_randomness, + point_sample, + point_sample_fine_grained_features, +) +from .point_head import build_point_head, roi_mask_point_loss + + +def calculate_uncertainty(logits, classes): + """ + We estimate uncerainty as L1 distance between 0.0 and the logit prediction in 'logits' for the + foreground class in `classes`. + + Args: + logits (Tensor): A tensor of shape (R, C, ...) or (R, 1, ...) 
for class-specific or + class-agnostic, where R is the total number of predicted masks in all images and C is + the number of foreground classes. The values are logits. + classes (list): A list of length R that contains either predicted of ground truth class + for eash predicted mask. + + Returns: + scores (Tensor): A tensor of shape (R, 1, ...) that contains uncertainty scores with + the most uncertain locations having the highest uncertainty score. + """ + if logits.shape[1] == 1: + gt_class_logits = logits.clone() + else: + gt_class_logits = logits[ + torch.arange(logits.shape[0], device=logits.device), classes + ].unsqueeze(1) + return -(torch.abs(gt_class_logits)) + + +@ROI_HEADS_REGISTRY.register() +class PointRendROIHeads(StandardROIHeads): + """ + The RoI heads class for PointRend instance segmentation models. + + In this class we redefine the mask head of `StandardROIHeads` leaving all other heads intact. + To avoid namespace conflict with other heads we use names starting from `mask_` for all + variables that correspond to the mask head in the class's namespace. + """ + + def __init__(self, cfg, input_shape): + # TODO use explicit args style + super().__init__(cfg, input_shape) + self._init_mask_head(cfg, input_shape) + + def _init_mask_head(self, cfg, input_shape): + # fmt: off + self.mask_on = cfg.MODEL.MASK_ON + if not self.mask_on: + return + self.mask_coarse_in_features = cfg.MODEL.ROI_MASK_HEAD.IN_FEATURES + self.mask_coarse_side_size = cfg.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION + self._feature_scales = {k: 1.0 / v.stride for k, v in input_shape.items()} + # fmt: on + + in_channels = np.sum([input_shape[f].channels for f in self.mask_coarse_in_features]) + self.mask_coarse_head = build_mask_head( + cfg, + ShapeSpec( + channels=in_channels, + width=self.mask_coarse_side_size, + height=self.mask_coarse_side_size, + ), + ) + self._init_point_head(cfg, input_shape) + + def _init_point_head(self, cfg, input_shape): + # fmt: off + self.mask_point_on = cfg.MODEL.ROI_MASK_HEAD.POINT_HEAD_ON + if not self.mask_point_on: + return + assert cfg.MODEL.ROI_HEADS.NUM_CLASSES == cfg.MODEL.POINT_HEAD.NUM_CLASSES + self.mask_point_in_features = cfg.MODEL.POINT_HEAD.IN_FEATURES + self.mask_point_train_num_points = cfg.MODEL.POINT_HEAD.TRAIN_NUM_POINTS + self.mask_point_oversample_ratio = cfg.MODEL.POINT_HEAD.OVERSAMPLE_RATIO + self.mask_point_importance_sample_ratio = cfg.MODEL.POINT_HEAD.IMPORTANCE_SAMPLE_RATIO + # next two parameters are use in the adaptive subdivions inference procedure + self.mask_point_subdivision_steps = cfg.MODEL.POINT_HEAD.SUBDIVISION_STEPS + self.mask_point_subdivision_num_points = cfg.MODEL.POINT_HEAD.SUBDIVISION_NUM_POINTS + # fmt: on + + in_channels = np.sum([input_shape[f].channels for f in self.mask_point_in_features]) + self.mask_point_head = build_point_head( + cfg, ShapeSpec(channels=in_channels, width=1, height=1) + ) + + def _forward_mask(self, features, instances): + """ + Forward logic of the mask prediction branch. + + Args: + features (dict[str, Tensor]): #level input features for mask prediction + instances (list[Instances]): the per-image instances to train/predict masks. + In training, they can be the proposals. + In inference, they can be the predicted boxes. + + Returns: + In training, a dict of losses. + In inference, update `instances` with new fields "pred_masks" and return it. 
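As a rough, self-contained illustration of the uncertainty measure defined in `calculate_uncertainty` above (shapes and values are assumed): the score is the negated absolute logit of the relevant class, so points whose logit is near zero are the most uncertain.

```python
import torch

# 2 predicted masks (R), 3 classes (C), 4 sampled points (made-up sizes).
logits = torch.randn(2, 3, 4)
classes = torch.tensor([1, 0])  # one class label per mask

gt_class_logits = logits[torch.arange(2), classes].unsqueeze(1)  # (2, 1, 4)
uncertainty = -gt_class_logits.abs()  # highest (closest to 0) where the logit is near 0
print(uncertainty.shape)  # torch.Size([2, 1, 4])
```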
+ """ + if not self.mask_on: + return {} if self.training else instances + + if self.training: + proposals, _ = select_foreground_proposals(instances, self.num_classes) + proposal_boxes = [x.proposal_boxes for x in proposals] + mask_coarse_logits = self._forward_mask_coarse(features, proposal_boxes) + + losses = {"loss_mask": mask_rcnn_loss(mask_coarse_logits, proposals)} + losses.update(self._forward_mask_point(features, mask_coarse_logits, proposals)) + return losses + else: + pred_boxes = [x.pred_boxes for x in instances] + mask_coarse_logits = self._forward_mask_coarse(features, pred_boxes) + + mask_logits = self._forward_mask_point(features, mask_coarse_logits, instances) + mask_rcnn_inference(mask_logits, instances) + return instances + + def _forward_mask_coarse(self, features, boxes): + """ + Forward logic of the coarse mask head. + """ + point_coords = generate_regular_grid_point_coords( + np.sum(len(x) for x in boxes), self.mask_coarse_side_size, boxes[0].device + ) + mask_coarse_features_list = [features[k] for k in self.mask_coarse_in_features] + features_scales = [self._feature_scales[k] for k in self.mask_coarse_in_features] + # For regular grids of points, this function is equivalent to `len(features_list)' calls + # of `ROIAlign` (with `SAMPLING_RATIO=2`), and concat the results. + mask_features, _ = point_sample_fine_grained_features( + mask_coarse_features_list, features_scales, boxes, point_coords + ) + return self.mask_coarse_head(mask_features) + + def _forward_mask_point(self, features, mask_coarse_logits, instances): + """ + Forward logic of the mask point head. + """ + if not self.mask_point_on: + return {} if self.training else mask_coarse_logits + + mask_features_list = [features[k] for k in self.mask_point_in_features] + features_scales = [self._feature_scales[k] for k in self.mask_point_in_features] + + if self.training: + proposal_boxes = [x.proposal_boxes for x in instances] + gt_classes = cat([x.gt_classes for x in instances]) + with torch.no_grad(): + point_coords = get_uncertain_point_coords_with_randomness( + mask_coarse_logits, + lambda logits: calculate_uncertainty(logits, gt_classes), + self.mask_point_train_num_points, + self.mask_point_oversample_ratio, + self.mask_point_importance_sample_ratio, + ) + + fine_grained_features, point_coords_wrt_image = point_sample_fine_grained_features( + mask_features_list, features_scales, proposal_boxes, point_coords + ) + coarse_features = point_sample(mask_coarse_logits, point_coords, align_corners=False) + point_logits = self.mask_point_head(fine_grained_features, coarse_features) + return { + "loss_mask_point": roi_mask_point_loss( + point_logits, instances, point_coords_wrt_image + ) + } + else: + pred_boxes = [x.pred_boxes for x in instances] + pred_classes = cat([x.pred_classes for x in instances]) + # The subdivision code will fail with the empty list of boxes + if len(pred_classes) == 0: + return mask_coarse_logits + + mask_logits = mask_coarse_logits.clone() + for subdivions_step in range(self.mask_point_subdivision_steps): + mask_logits = interpolate( + mask_logits, scale_factor=2, mode="bilinear", align_corners=False + ) + # If `mask_point_subdivision_num_points` is larger or equal to the + # resolution of the next step, then we can skip this step + H, W = mask_logits.shape[-2:] + if ( + self.mask_point_subdivision_num_points >= 4 * H * W + and subdivions_step < self.mask_point_subdivision_steps - 1 + ): + continue + uncertainty_map = calculate_uncertainty(mask_logits, pred_classes) + point_indices, 
point_coords = get_uncertain_point_coords_on_grid( + uncertainty_map, self.mask_point_subdivision_num_points + ) + fine_grained_features, _ = point_sample_fine_grained_features( + mask_features_list, features_scales, pred_boxes, point_coords + ) + coarse_features = point_sample( + mask_coarse_logits, point_coords, align_corners=False + ) + point_logits = self.mask_point_head(fine_grained_features, coarse_features) + + # put mask point predictions to the right places on the upsampled grid. + R, C, H, W = mask_logits.shape + point_indices = point_indices.unsqueeze(1).expand(-1, C, -1) + mask_logits = ( + mask_logits.reshape(R, C, H * W) + .scatter_(2, point_indices, point_logits) + .view(R, C, H, W) + ) + return mask_logits diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/semantic_seg.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/semantic_seg.py new file mode 100644 index 0000000000000000000000000000000000000000..670a0ea201a6de82f3126171e6320d56f65e1ba7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/point_rend/semantic_seg.py @@ -0,0 +1,134 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +from typing import Dict +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.layers import ShapeSpec, cat +from detectron2.modeling import SEM_SEG_HEADS_REGISTRY + +from .point_features import ( + get_uncertain_point_coords_on_grid, + get_uncertain_point_coords_with_randomness, + point_sample, +) +from .point_head import build_point_head + + +def calculate_uncertainty(sem_seg_logits): + """ + For each location of the prediction `sem_seg_logits` we estimate uncerainty as the + difference between top first and top second predicted logits. + + Args: + mask_logits (Tensor): A tensor of shape (N, C, ...), where N is the minibatch size and + C is the number of foreground classes. The values are logits. + + Returns: + scores (Tensor): A tensor of shape (N, 1, ...) that contains uncertainty scores with + the most uncertain locations having the highest uncertainty score. + """ + top2_scores = torch.topk(sem_seg_logits, k=2, dim=1)[0] + return (top2_scores[:, 1] - top2_scores[:, 0]).unsqueeze(1) + + +@SEM_SEG_HEADS_REGISTRY.register() +class PointRendSemSegHead(nn.Module): + """ + A semantic segmentation head that combines a head set in `POINT_HEAD.COARSE_SEM_SEG_HEAD_NAME` + and a point head set in `MODEL.POINT_HEAD.NAME`. 
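A small, self-contained sketch of the two ingredients used above, with assumed shapes: the top-2-logit uncertainty, and point sampling, which is essentially `F.grid_sample` with point coordinates given in [0, 1] instead of [-1, 1].

```python
import torch
import torch.nn.functional as F

sem_seg_logits = torch.randn(1, 3, 4, 4)      # N=1 image, C=3 classes, 4x4 grid (assumed)

# Uncertainty: (second-best logit) - (best logit); always <= 0 and closest to 0
# where the top two classes are nearly tied.
top2 = torch.topk(sem_seg_logits, k=2, dim=1)[0]
uncertainty = (top2[:, 1] - top2[:, 0]).unsqueeze(1)          # (1, 1, 4, 4)

# Point sampling: coordinates in [0, 1] are remapped to grid_sample's [-1, 1] range.
point_coords = torch.rand(1, 7, 2)                            # 7 random points per image
grid = 2.0 * point_coords.unsqueeze(2) - 1.0                  # (1, 7, 1, 2)
point_logits = F.grid_sample(sem_seg_logits, grid, align_corners=False).squeeze(3)
print(uncertainty.shape, point_logits.shape)                  # (1, 1, 4, 4) and (1, 3, 7)
```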
+ """ + + def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]): + super().__init__() + + self.ignore_value = cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE + + self.coarse_sem_seg_head = SEM_SEG_HEADS_REGISTRY.get( + cfg.MODEL.POINT_HEAD.COARSE_SEM_SEG_HEAD_NAME + )(cfg, input_shape) + self._init_point_head(cfg, input_shape) + + def _init_point_head(self, cfg, input_shape: Dict[str, ShapeSpec]): + # fmt: off + assert cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES == cfg.MODEL.POINT_HEAD.NUM_CLASSES + feature_channels = {k: v.channels for k, v in input_shape.items()} + self.in_features = cfg.MODEL.POINT_HEAD.IN_FEATURES + self.train_num_points = cfg.MODEL.POINT_HEAD.TRAIN_NUM_POINTS + self.oversample_ratio = cfg.MODEL.POINT_HEAD.OVERSAMPLE_RATIO + self.importance_sample_ratio = cfg.MODEL.POINT_HEAD.IMPORTANCE_SAMPLE_RATIO + self.subdivision_steps = cfg.MODEL.POINT_HEAD.SUBDIVISION_STEPS + self.subdivision_num_points = cfg.MODEL.POINT_HEAD.SUBDIVISION_NUM_POINTS + # fmt: on + + in_channels = np.sum([feature_channels[f] for f in self.in_features]) + self.point_head = build_point_head(cfg, ShapeSpec(channels=in_channels, width=1, height=1)) + + def forward(self, features, targets=None): + coarse_sem_seg_logits = self.coarse_sem_seg_head.layers(features) + + if self.training: + losses = self.coarse_sem_seg_head.losses(coarse_sem_seg_logits, targets) + + with torch.no_grad(): + point_coords = get_uncertain_point_coords_with_randomness( + coarse_sem_seg_logits, + calculate_uncertainty, + self.train_num_points, + self.oversample_ratio, + self.importance_sample_ratio, + ) + coarse_features = point_sample(coarse_sem_seg_logits, point_coords, align_corners=False) + + fine_grained_features = cat( + [ + point_sample(features[in_feature], point_coords, align_corners=False) + for in_feature in self.in_features + ] + ) + point_logits = self.point_head(fine_grained_features, coarse_features) + point_targets = ( + point_sample( + targets.unsqueeze(1).to(torch.float), + point_coords, + mode="nearest", + align_corners=False, + ) + .squeeze(1) + .to(torch.long) + ) + losses["loss_sem_seg_point"] = F.cross_entropy( + point_logits, point_targets, reduction="mean", ignore_index=self.ignore_value + ) + return None, losses + else: + sem_seg_logits = coarse_sem_seg_logits.clone() + for _ in range(self.subdivision_steps): + sem_seg_logits = F.interpolate( + sem_seg_logits, scale_factor=2, mode="bilinear", align_corners=False + ) + uncertainty_map = calculate_uncertainty(sem_seg_logits) + point_indices, point_coords = get_uncertain_point_coords_on_grid( + uncertainty_map, self.subdivision_num_points + ) + fine_grained_features = cat( + [ + point_sample(features[in_feature], point_coords, align_corners=False) + for in_feature in self.in_features + ] + ) + coarse_features = point_sample( + coarse_sem_seg_logits, point_coords, align_corners=False + ) + point_logits = self.point_head(fine_grained_features, coarse_features) + + # put sem seg point predictions to the right places on the upsampled grid. 
+ N, C, H, W = sem_seg_logits.shape + point_indices = point_indices.unsqueeze(1).expand(-1, C, -1) + sem_seg_logits = ( + sem_seg_logits.reshape(N, C, H * W) + .scatter_(2, point_indices, point_logits) + .view(N, C, H, W) + ) + return sem_seg_logits, {} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/run.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..4ee1614b02f784cb46fa65243174ea3588eb1adc --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/run.sh @@ -0,0 +1,2 @@ +python finetune_net.py --config-file configs/InstanceSegmentation/pointrend_rcnn_X_101_32x8d_FPN_3x_parsing.yaml --num-gpus 1 +#python finetune_net.py --config-file configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_parsing.yaml --num-gpus 1 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/train_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/train_net.py new file mode 100755 index 0000000000000000000000000000000000000000..7832867ec668c5715c4124c02b72909a318836e8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/PointRend/train_net.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +PointRend Training Script. + +This script is a simplified version of the training script in detectron2/tools. +""" + +import os +import torch + +import detectron2.utils.comm as comm +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import MetadataCatalog, build_detection_train_loader +from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, launch +from detectron2.evaluation import ( + CityscapesInstanceEvaluator, + CityscapesSemSegEvaluator, + COCOEvaluator, + DatasetEvaluators, + LVISEvaluator, + SemSegEvaluator, + verify_results, +) + +from point_rend import SemSegDatasetMapper, add_pointrend_config + + +class Trainer(DefaultTrainer): + """ + We use the "DefaultTrainer" which contains a number pre-defined logic for + standard training workflow. They may not work for you, especially if you + are working on a new research project. In that case you can use the cleaner + "SimpleTrainer", or write your own training loop. + """ + + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_folder=None): + """ + Create evaluator(s) for a given dataset. + This uses the special metadata "evaluator_type" associated with each builtin dataset. + For your own dataset, you can simply create an evaluator manually in your + script and do not have to worry about the hacky if-else logic here. 
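For example, a minimal sketch of the "create an evaluator manually" route mentioned above: the dataset name and output directory are hypothetical, and `cfg`, `model`, and `Trainer` would come from `setup()`, `Trainer.build_model(cfg)`, and this script as in `main()` below.

```python
from detectron2.evaluation import COCOEvaluator

# Hypothetical registered dataset name and output directory.
evaluator = COCOEvaluator("my_parsing_val", cfg, distributed=True, output_dir="./inference")
results = Trainer.test(cfg, model, evaluators=[evaluator])
```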
+ """ + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + evaluator_list = [] + evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type + if evaluator_type == "lvis": + return LVISEvaluator(dataset_name, cfg, True, output_folder) + if evaluator_type == "coco": + return COCOEvaluator(dataset_name, cfg, True, output_folder) + if evaluator_type == "sem_seg": + return SemSegEvaluator( + dataset_name, + distributed=True, + num_classes=cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES, + ignore_label=cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE, + output_dir=output_folder, + ) + if evaluator_type == "cityscapes_instance": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." + return CityscapesInstanceEvaluator(dataset_name) + if evaluator_type == "cityscapes_sem_seg": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." + return CityscapesSemSegEvaluator(dataset_name) + if len(evaluator_list) == 0: + raise NotImplementedError( + "no Evaluator for the dataset {} with the type {}".format( + dataset_name, evaluator_type + ) + ) + if len(evaluator_list) == 1: + return evaluator_list[0] + return DatasetEvaluators(evaluator_list) + + @classmethod + def build_train_loader(cls, cfg): + if "SemanticSegmentor" in cfg.MODEL.META_ARCHITECTURE: + mapper = SemSegDatasetMapper(cfg, True) + else: + mapper = None + return build_detection_train_loader(cfg, mapper=mapper) + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg() + add_pointrend_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + model = Trainer.build_model(cfg) + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + res = Trainer.test(cfg, model) + if comm.is_main_process(): + verify_results(cfg, res) + return res + + trainer = Trainer(cfg) + trainer.resume_or_load(resume=args.resume) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/README.md new file mode 100644 index 0000000000000000000000000000000000000000..36263bd87401a98f273831f4ec98fcb5c65d3412 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/README.md @@ -0,0 +1,31 @@ + +Here are a few projects that are built on detectron2. +They are examples of how to use detectron2 as a library, to make your projects more +maintainable. + +## Projects by Facebook + +Note that these are research projects, and therefore may not have the same level +of support or stability of detectron2. 
+ ++ [DensePose: Dense Human Pose Estimation In The Wild](DensePose) ++ [Scale-Aware Trident Networks for Object Detection](TridentNet) ++ [TensorMask: A Foundation for Dense Object Segmentation](TensorMask) ++ [Mesh R-CNN](https://github.com/facebookresearch/meshrcnn) ++ [PointRend: Image Segmentation as Rendering](PointRend) ++ [Momentum Contrast for Unsupervised Visual Representation Learning](https://github.com/facebookresearch/moco/tree/master/detection) + + +## External Projects + +External projects in the community that use detectron2: + + + ++ [VoVNet backbones](https://github.com/youngwanLEE/vovnet-detectron2). ++ [AdelaiDet](https://github.com/aim-uofa/adet), a detection toolbox from the University of Adelaide. ++ [CenterMask : Real-Time Anchor-Free Instance Segmentation](https://github.com/youngwanLEE/centermask2) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6831508b9aea37f0e88bec62c98f2bf2b64240ab --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/README.md @@ -0,0 +1,64 @@ + +# TensorMask in Detectron2 +**A Foundation for Dense Object Segmentation** + +Xinlei Chen, Ross Girshick, Kaiming He, Piotr Dollár + +[[`arXiv`](https://arxiv.org/abs/1903.12174)] [[`BibTeX`](#CitingTensorMask)] + +
+ +
+ +In this repository, we release code for TensorMask in Detectron2. +TensorMask is a dense sliding-window instance segmentation framework that, for the first time, achieves results close to the well-developed Mask R-CNN framework -- both qualitatively and quantitatively. It establishes a conceptually complementary direction for object instance segmentation research. + +## Installation +First install Detectron2 following the [documentation](https://detectron2.readthedocs.io/tutorials/install.html) and +[setup the dataset](../../datasets). Then compile the TensorMask-specific op (`swap_align2nat`): +```bash +cd /path/to/detectron2/projects/TensorMask +python setup.py build develop +``` + +## Training + +To train a model, run: +```bash +python /path/to/detectron2/projects/TensorMask/train_net.py --config-file +``` + +For example, to launch TensorMask BiPyramid training (1x schedule) with ResNet-50 backbone on 8 GPUs, +one should execute: +```bash +python /path/to/detectron2/projects/TensorMask/train_net.py --config-file configs/tensormask_R_50_FPN_1x.yaml --num-gpus 8 +``` + +## Evaluation + +Model evaluation can be done similarly (6x schedule with scale augmentation): +```bash +python /path/to/detectron2/projects/TensorMask/train_net.py --config-file configs/tensormask_R_50_FPN_6x.yaml --eval-only MODEL.WEIGHTS /path/to/model_checkpoint +``` + +# Pretrained Models + +| Backbone | lr sched | AP box | AP mask | download | +| -------- | -------- | -- | --- | -------- | +| R50 | 1x | 37.6 | 32.4 | model \|  metrics | +| R50 | 6x | 41.4 | 35.8 | model \|  metrics | + + +## Citing TensorMask + +If you use TensorMask, please use the following BibTeX entry. + +``` +@InProceedings{chen2019tensormask, + title={Tensormask: A Foundation for Dense Object Segmentation}, + author={Chen, Xinlei and Girshick, Ross and He, Kaiming and Doll{\'a}r, Piotr}, + journal={The International Conference on Computer Vision (ICCV)}, + year={2019} +} +``` + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/Base-TensorMask.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/Base-TensorMask.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a7245349b4aa9cfa00f20074cc7cb5cdb02607f9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/Base-TensorMask.yaml @@ -0,0 +1,25 @@ +MODEL: + META_ARCHITECTURE: "TensorMask" + MASK_ON: True + BACKBONE: + NAME: "build_retinanet_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res2", "res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: [[44, 60], [88, 120], [176, 240], [352, 480], [704, 960], [1408, 1920]] + ASPECT_RATIOS: [[1.0]] + FPN: + IN_FEATURES: ["res2", "res3", "res4", "res5"] + FUSE_TYPE: "avg" + TENSOR_MASK: + ALIGNED_ON: True + BIPYRAMID_ON: True +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/tensormask_R_50_FPN_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/tensormask_R_50_FPN_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5d5eee135a93149a0c4b2148a47cee02e8aed8eb --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/tensormask_R_50_FPN_1x.yaml @@ -0,0 +1,5 @@ +_BASE_: "Base-TensorMask.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/tensormask_R_50_FPN_6x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/tensormask_R_50_FPN_6x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..366a965c4adfdbba2482593c0c81f3e6af50dfd2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/configs/tensormask_R_50_FPN_6x.yaml @@ -0,0 +1,11 @@ +_BASE_: "Base-TensorMask.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (480000, 520000) + MAX_ITER: 540000 +INPUT: + MIN_SIZE_TRAIN_SAMPLING: "range" + MIN_SIZE_TRAIN: (640, 800) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/setup.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..0194e76608966b528ab32879edc40a8e4ac3225f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/setup.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import glob +import os +from setuptools import find_packages, setup +import torch +from torch.utils.cpp_extension import CUDA_HOME, CppExtension, CUDAExtension + + +def get_extensions(): + this_dir = os.path.dirname(os.path.abspath(__file__)) + extensions_dir = os.path.join(this_dir, "tensormask", "layers", "csrc") + + main_source = os.path.join(extensions_dir, "vision.cpp") + sources = glob.glob(os.path.join(extensions_dir, "**", "*.cpp")) + source_cuda = glob.glob(os.path.join(extensions_dir, "**", "*.cu")) + glob.glob( + os.path.join(extensions_dir, "*.cu") + ) + + sources = [main_source] + sources + + extension = CppExtension + + extra_compile_args = {"cxx": []} + define_macros = [] + + if (torch.cuda.is_available() and CUDA_HOME is not None) or os.getenv("FORCE_CUDA", "0") == "1": + extension = CUDAExtension + sources += source_cuda + define_macros += [("WITH_CUDA", None)] + extra_compile_args["nvcc"] = [ + "-DCUDA_HAS_FP16=1", + "-D__CUDA_NO_HALF_OPERATORS__", + "-D__CUDA_NO_HALF_CONVERSIONS__", + "-D__CUDA_NO_HALF2_OPERATORS__", + ] + + # It's better if pytorch can do this by default .. 
+ CC = os.environ.get("CC", None) + if CC is not None: + extra_compile_args["nvcc"].append("-ccbin={}".format(CC)) + + sources = [os.path.join(extensions_dir, s) for s in sources] + + include_dirs = [extensions_dir] + + ext_modules = [ + extension( + "tensormask._C", + sources, + include_dirs=include_dirs, + define_macros=define_macros, + extra_compile_args=extra_compile_args, + ) + ] + + return ext_modules + + +setup( + name="tensormask", + version="0.1", + author="FAIR", + packages=find_packages(exclude=("configs", "tests")), + python_requires=">=3.6", + ext_modules=get_extensions(), + cmdclass={"build_ext": torch.utils.cpp_extension.BuildExtension}, +) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e3b642a55519867dc52ccc57a36c32c72c3d34da --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .config import add_tensormask_config +from .arch import TensorMask diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/arch.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/arch.py new file mode 100644 index 0000000000000000000000000000000000000000..a3e89c6b4283b28fe8028300e146d7b7543f0da1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/arch.py @@ -0,0 +1,904 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import copy +import logging +import math +from typing import List +import torch +import torch.nn.functional as F +from fvcore.nn import sigmoid_focal_loss_star_jit, smooth_l1_loss +from torch import nn + +from detectron2.layers import ShapeSpec, batched_nms, cat, paste_masks_in_image +from detectron2.modeling.anchor_generator import DefaultAnchorGenerator +from detectron2.modeling.backbone import build_backbone +from detectron2.modeling.box_regression import Box2BoxTransform +from detectron2.modeling.meta_arch.build import META_ARCH_REGISTRY +from detectron2.modeling.meta_arch.retinanet import ( + permute_all_cls_and_box_to_N_HWA_K_and_concat, + permute_to_N_HWA_K, +) +from detectron2.structures import Boxes, ImageList, Instances +from detectron2.utils.logger import log_first_n + +from tensormask.layers import SwapAlign2Nat + +__all__ = ["TensorMask"] + + +def _assignment_rule( + gt_boxes, + anchor_boxes, + unit_lengths, + min_anchor_size, + scale_thresh=2.0, + spatial_thresh=1.0, + uniqueness_on=True, +): + """ + Given two lists of boxes of N ground truth boxes and M anchor boxes, + compute the assignment between the two, following the assignment rules in + https://arxiv.org/abs/1903.12174. + The box order must be (xmin, ymin, xmax, ymax), so please make sure to convert + to BoxMode.XYXY_ABS before calling this function. + + Args: + gt_boxes, anchor_boxes (Boxes): two Boxes. Contains N & M boxes/anchors, respectively. + unit_lengths (Tensor): Contains the unit lengths of M anchor boxes. 
+ min_anchor_size (float): Minimum size of the anchor, in pixels + scale_thresh (float): The `scale` threshold: the maximum size of the anchor + should not be greater than scale_thresh x max(h, w) of + the ground truth box. + spatial_thresh (float): The `spatial` threshold: the l2 distance between the + center of the anchor and the ground truth box should not + be greater than spatial_thresh x u where u is the unit length. + + Returns: + matches (Tensor[int64]): a vector of length M, where matches[i] is a matched + ground-truth index in [0, N) + match_labels (Tensor[int8]): a vector of length M, where pred_labels[i] indicates + whether a prediction is a true or false positive or ignored + """ + gt_boxes, anchor_boxes = gt_boxes.tensor, anchor_boxes.tensor + N = gt_boxes.shape[0] + M = anchor_boxes.shape[0] + if N == 0 or M == 0: + return ( + gt_boxes.new_full((N,), 0, dtype=torch.int64), + gt_boxes.new_full((N,), -1, dtype=torch.int8), + ) + + # Containment rule + lt = torch.min(gt_boxes[:, None, :2], anchor_boxes[:, :2]) # [N,M,2] + rb = torch.max(gt_boxes[:, None, 2:], anchor_boxes[:, 2:]) # [N,M,2] + union = cat([lt, rb], dim=2) # [N,M,4] + + dummy_gt_boxes = torch.zeros_like(gt_boxes) + anchor = dummy_gt_boxes[:, None, :] + anchor_boxes[:, :] # [N,M,4] + + contain_matrix = torch.all(union == anchor, dim=2) # [N,M] + + # Centrality rule, scale + gt_size_lower = torch.max(gt_boxes[:, 2:] - gt_boxes[:, :2], dim=1)[0] # [N] + gt_size_upper = gt_size_lower * scale_thresh # [N] + # Fall back for small objects + gt_size_upper[gt_size_upper < min_anchor_size] = min_anchor_size + # Due to sampling of locations, the anchor sizes are deducted with sampling strides + anchor_size = ( + torch.max(anchor_boxes[:, 2:] - anchor_boxes[:, :2], dim=1)[0] - unit_lengths + ) # [M] + + size_diff_upper = gt_size_upper[:, None] - anchor_size # [N,M] + scale_matrix = size_diff_upper >= 0 # [N,M] + + # Centrality rule, spatial + gt_center = (gt_boxes[:, 2:] + gt_boxes[:, :2]) / 2 # [N,2] + anchor_center = (anchor_boxes[:, 2:] + anchor_boxes[:, :2]) / 2 # [M,2] + offset_center = gt_center[:, None, :] - anchor_center[:, :] # [N,M,2] + offset_center /= unit_lengths[:, None] # [N,M,2] + spatial_square = spatial_thresh * spatial_thresh + spatial_matrix = torch.sum(offset_center * offset_center, dim=2) <= spatial_square + + assign_matrix = (contain_matrix & scale_matrix & spatial_matrix).int() + + # assign_matrix is N (gt) x M (predicted) + # Max over gt elements (dim 0) to find best gt candidate for each prediction + matched_vals, matches = assign_matrix.max(dim=0) + match_labels = matches.new_full(matches.size(), 1, dtype=torch.int8) + + match_labels[matched_vals == 0] = 0 + match_labels[matched_vals == 1] = 1 + + # find all the elements that match to ground truths multiple times + not_unique_idxs = assign_matrix.sum(dim=0) > 1 + if uniqueness_on: + match_labels[not_unique_idxs] = 0 + else: + match_labels[not_unique_idxs] = -1 + + return matches, match_labels + + +# TODO make the paste_mask function in d2 core support mask list +def _paste_mask_lists_in_image(masks, boxes, image_shape, threshold=0.5): + """ + Paste a list of masks that are of various resolutions (e.g., 28 x 28) into an image. + The location, height, and width for pasting each mask is determined by their + corresponding bounding boxes in boxes. + + Args: + masks (list(Tensor)): A list of Tensor of shape (1, Hmask_i, Wmask_i). + Values are in [0, 1]. The list length, Bimg, is the + number of detected object instances in the image. 
+ boxes (Boxes): A Boxes of length Bimg. boxes.tensor[i] and masks[i] correspond + to the same object instance. + image_shape (tuple): height, width + threshold (float): A threshold in [0, 1] for converting the (soft) masks to + binary masks. + + Returns: + img_masks (Tensor): A tensor of shape (Bimg, Himage, Wimage), where Bimg is the + number of detected object instances and Himage, Wimage are the image width + and height. img_masks[i] is a binary mask for object instance i. + """ + if len(masks) == 0: + return torch.empty((0, 1) + image_shape, dtype=torch.uint8) + + # Loop over masks groups. Each group has the same mask prediction size. + img_masks = [] + ind_masks = [] + mask_sizes = torch.tensor([m.shape[-1] for m in masks]) + unique_sizes = torch.unique(mask_sizes) + for msize in unique_sizes.tolist(): + cur_ind = torch.where(mask_sizes == msize)[0] + ind_masks.append(cur_ind) + + cur_masks = cat([masks[i] for i in cur_ind]) + cur_boxes = boxes[cur_ind] + img_masks.append(paste_masks_in_image(cur_masks, cur_boxes, image_shape, threshold)) + + img_masks = cat(img_masks) + ind_masks = cat(ind_masks) + + img_masks_out = torch.empty_like(img_masks) + img_masks_out[ind_masks, :, :] = img_masks + + return img_masks_out + + +def _postprocess(results, result_mask_info, output_height, output_width, mask_threshold=0.5): + """ + Post-process the output boxes for TensorMask. + The input images are often resized when entering an object detector. + As a result, we often need the outputs of the detector in a different + resolution from its inputs. + + This function will postprocess the raw outputs of TensorMask + to produce outputs according to the desired output resolution. + + Args: + results (Instances): the raw outputs from the detector. + `results.image_size` contains the input image resolution the detector sees. + This object might be modified in-place. Note that it does not contain the field + `pred_masks`, which is provided by another input `result_masks`. + result_mask_info (list[Tensor], Boxes): a pair of two items for mask related results. + The first item is a list of #detection tensors, each is the predicted masks. + The second item is the anchors corresponding to the predicted masks. + output_height, output_width: the desired output resolution. + + Returns: + Instances: the postprocessed output from the model, based on the output resolution + """ + scale_x, scale_y = (output_width / results.image_size[1], output_height / results.image_size[0]) + results = Instances((output_height, output_width), **results.get_fields()) + + output_boxes = results.pred_boxes + output_boxes.tensor[:, 0::2] *= scale_x + output_boxes.tensor[:, 1::2] *= scale_y + output_boxes.clip(results.image_size) + + inds_nonempty = output_boxes.nonempty() + results = results[inds_nonempty] + result_masks, result_anchors = result_mask_info + if result_masks: + result_anchors.tensor[:, 0::2] *= scale_x + result_anchors.tensor[:, 1::2] *= scale_y + result_masks = [x for (i, x) in zip(inds_nonempty.tolist(), result_masks) if i] + results.pred_masks = _paste_mask_lists_in_image( + result_masks, + result_anchors[inds_nonempty], + results.image_size, + threshold=mask_threshold, + ) + return results + + +class TensorMaskAnchorGenerator(DefaultAnchorGenerator): + """ + For a set of image sizes and feature maps, computes a set of anchors for TensorMask. + It also computes the unit lengths and indexes for each anchor box. 
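As a toy check of the containment part of `_assignment_rule` above (boxes are made up): an anchor can only be matched to a ground-truth box that it fully contains, i.e. the union of the two boxes equals the anchor itself.

```python
import torch

gt = torch.tensor([[12., 12., 30., 30.]])            # 1 ground-truth box (XYXY)
anchors = torch.tensor([[10., 10., 40., 40.],        # fully contains gt
                        [20., 20., 50., 50.]])       # does not

lt = torch.min(gt[:, None, :2], anchors[:, :2])      # (1, 2, 2)
rb = torch.max(gt[:, None, 2:], anchors[:, 2:])      # (1, 2, 2)
union = torch.cat([lt, rb], dim=2)                   # (1, 2, 4)
contain = torch.all(union == anchors.unsqueeze(0), dim=2)
print(contain)  # tensor([[ True, False]])
```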
+ """ + + def grid_anchors_with_unit_lengths_and_indexes(self, grid_sizes): + anchors = [] + unit_lengths = [] + indexes = [] + for lvl, (size, stride, base_anchors) in enumerate( + zip(grid_sizes, self.strides, self.cell_anchors) + ): + grid_height, grid_width = size + device = base_anchors.device + shifts_x = torch.arange( + 0, grid_width * stride, step=stride, dtype=torch.float32, device=device + ) + shifts_y = torch.arange( + 0, grid_height * stride, step=stride, dtype=torch.float32, device=device + ) + shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) + shifts = torch.stack((shift_x, shift_y, shift_x, shift_y), dim=2) + # Stack anchors in shapes of (HWA, 4) + cur_anchor = (shifts[:, :, None, :] + base_anchors.view(1, 1, -1, 4)).view(-1, 4) + anchors.append(cur_anchor) + unit_lengths.append( + torch.full((cur_anchor.shape[0],), stride, dtype=torch.float32, device=device) + ) + # create mask indexes using mesh grid + shifts_l = torch.full((1,), lvl, dtype=torch.int64, device=device) + shifts_i = torch.zeros((1,), dtype=torch.int64, device=device) + shifts_h = torch.arange(0, grid_height, dtype=torch.int64, device=device) + shifts_w = torch.arange(0, grid_width, dtype=torch.int64, device=device) + shifts_a = torch.arange(0, base_anchors.shape[0], dtype=torch.int64, device=device) + grids = torch.meshgrid(shifts_l, shifts_i, shifts_h, shifts_w, shifts_a) + + indexes.append(torch.stack(grids, dim=5).view(-1, 5)) + + return anchors, unit_lengths, indexes + + def forward(self, features): + """ + Returns: + list[list[Boxes]]: a list of #image elements. Each is a list of #feature level Boxes. + The Boxes contains anchors of this image on the specific feature level. + list[list[Tensor]]: a list of #image elements. Each is a list of #feature level tensors. + The tensor contains strides, or unit lengths for the anchors. + list[list[Tensor]]: a list of #image elements. Each is a list of #feature level tensors. + The Tensor contains indexes for the anchors, with the last dimension meaning + (L, N, H, W, A), where L is level, I is image (not set yet), H is height, + W is width, and A is anchor. + """ + num_images = len(features[0]) + grid_sizes = [feature_map.shape[-2:] for feature_map in features] + anchors_list, lengths_list, indexes_list = self.grid_anchors_with_unit_lengths_and_indexes( + grid_sizes + ) + + # Convert anchors from Tensor to Boxes + anchors_per_im = [Boxes(x) for x in anchors_list] + + # TODO it can be simplified to not return duplicated information for + # each image, just like detectron2's own AnchorGenerator + anchors = [copy.deepcopy(anchors_per_im) for _ in range(num_images)] + unit_lengths = [copy.deepcopy(lengths_list) for _ in range(num_images)] + indexes = [copy.deepcopy(indexes_list) for _ in range(num_images)] + + return anchors, unit_lengths, indexes + + +@META_ARCH_REGISTRY.register() +class TensorMask(nn.Module): + """ + TensorMask model. Creates FPN backbone, anchors and a head for classification + and box regression. Calculates and applies proper losses to class, box, and + masks. 
+ """ + + def __init__(self, cfg): + super().__init__() + + # fmt: off + self.num_classes = cfg.MODEL.TENSOR_MASK.NUM_CLASSES + self.in_features = cfg.MODEL.TENSOR_MASK.IN_FEATURES + self.anchor_sizes = cfg.MODEL.ANCHOR_GENERATOR.SIZES + self.num_levels = len(cfg.MODEL.ANCHOR_GENERATOR.SIZES) + # Loss parameters: + self.focal_loss_alpha = cfg.MODEL.TENSOR_MASK.FOCAL_LOSS_ALPHA + self.focal_loss_gamma = cfg.MODEL.TENSOR_MASK.FOCAL_LOSS_GAMMA + # Inference parameters: + self.score_threshold = cfg.MODEL.TENSOR_MASK.SCORE_THRESH_TEST + self.topk_candidates = cfg.MODEL.TENSOR_MASK.TOPK_CANDIDATES_TEST + self.nms_threshold = cfg.MODEL.TENSOR_MASK.NMS_THRESH_TEST + self.detections_im = cfg.TEST.DETECTIONS_PER_IMAGE + # Mask parameters: + self.mask_on = cfg.MODEL.MASK_ON + self.mask_loss_weight = cfg.MODEL.TENSOR_MASK.MASK_LOSS_WEIGHT + self.mask_pos_weight = torch.tensor(cfg.MODEL.TENSOR_MASK.POSITIVE_WEIGHT, + dtype=torch.float32) + self.bipyramid_on = cfg.MODEL.TENSOR_MASK.BIPYRAMID_ON + # fmt: on + + # build the backbone + self.backbone = build_backbone(cfg) + + backbone_shape = self.backbone.output_shape() + feature_shapes = [backbone_shape[f] for f in self.in_features] + feature_strides = [x.stride for x in feature_shapes] + # build anchors + self.anchor_generator = TensorMaskAnchorGenerator(cfg, feature_shapes) + self.num_anchors = self.anchor_generator.num_cell_anchors[0] + anchors_min_level = cfg.MODEL.ANCHOR_GENERATOR.SIZES[0] + self.mask_sizes = [size // feature_strides[0] for size in anchors_min_level] + self.min_anchor_size = min(anchors_min_level) - feature_strides[0] + + # head of the TensorMask + self.head = TensorMaskHead( + cfg, self.num_levels, self.num_anchors, self.mask_sizes, feature_shapes + ) + # box transform + self.box2box_transform = Box2BoxTransform(weights=cfg.MODEL.TENSOR_MASK.BBOX_REG_WEIGHTS) + self.register_buffer("pixel_mean", torch.Tensor(cfg.MODEL.PIXEL_MEAN).view(-1, 1, 1)) + self.register_buffer("pixel_std", torch.Tensor(cfg.MODEL.PIXEL_STD).view(-1, 1, 1)) + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DetectionTransform` . + Each item in the list contains the inputs for one image. + For now, each item in the list is a dict that contains: + image: Tensor, image in (C, H, W) format. + instances: Instances + Other information that's included in the original dicts, such as: + "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + Returns: + losses (dict[str: Tensor]): mapping from a named loss to a tensor + storing the loss. Used during training only. + """ + images = self.preprocess_image(batched_inputs) + if "instances" in batched_inputs[0]: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + elif "targets" in batched_inputs[0]: + log_first_n( + logging.WARN, "'targets' in the model inputs is now renamed to 'instances'!", n=10 + ) + gt_instances = [x["targets"].to(self.device) for x in batched_inputs] + else: + gt_instances = None + + features = self.backbone(images.tensor) + features = [features[f] for f in self.in_features] + # apply the TensorMask head + pred_logits, pred_deltas, pred_masks = self.head(features) + # generate anchors based on features, is it image specific? 
+ anchors, unit_lengths, indexes = self.anchor_generator(features) + + if self.training: + # get ground truths for class labels and box targets, it will label each anchor + gt_class_info, gt_delta_info, gt_mask_info, num_fg = self.get_ground_truth( + anchors, unit_lengths, indexes, gt_instances + ) + # compute the loss + return self.losses( + gt_class_info, + gt_delta_info, + gt_mask_info, + num_fg, + pred_logits, + pred_deltas, + pred_masks, + ) + else: + # do inference to get the output + results = self.inference(pred_logits, pred_deltas, pred_masks, anchors, indexes, images) + processed_results = [] + for results_im, input_im, image_size in zip( + results, batched_inputs, images.image_sizes + ): + height = input_im.get("height", image_size[0]) + width = input_im.get("width", image_size[1]) + # this is to do post-processing with the image size + result_box, result_mask = results_im + r = _postprocess(result_box, result_mask, height, width) + processed_results.append({"instances": r}) + return processed_results + + def losses( + self, + gt_class_info, + gt_delta_info, + gt_mask_info, + num_fg, + pred_logits, + pred_deltas, + pred_masks, + ): + """ + Args: + For `gt_class_info`, `gt_delta_info`, `gt_mask_info` and `num_fg` parameters, see + :meth:`TensorMask.get_ground_truth`. + For `pred_logits`, `pred_deltas` and `pred_masks`, see + :meth:`TensorMaskHead.forward`. + + Returns: + losses (dict[str: Tensor]): mapping from a named loss to a scalar tensor + storing the loss. Used during training only. The potential dict keys are: + "loss_cls", "loss_box_reg" and "loss_mask". + """ + gt_classes_target, gt_valid_inds = gt_class_info + gt_deltas, gt_fg_inds = gt_delta_info + gt_masks, gt_mask_inds = gt_mask_info + loss_normalizer = torch.tensor(max(1, num_fg), dtype=torch.float32, device=self.device) + + # classification and regression + pred_logits, pred_deltas = permute_all_cls_and_box_to_N_HWA_K_and_concat( + pred_logits, pred_deltas, self.num_classes + ) + loss_cls = ( + sigmoid_focal_loss_star_jit( + pred_logits[gt_valid_inds], + gt_classes_target[gt_valid_inds], + alpha=self.focal_loss_alpha, + gamma=self.focal_loss_gamma, + reduction="sum", + ) + / loss_normalizer + ) + + if num_fg == 0: + loss_box_reg = pred_deltas.sum() * 0 + else: + loss_box_reg = ( + smooth_l1_loss(pred_deltas[gt_fg_inds], gt_deltas, beta=0.0, reduction="sum") + / loss_normalizer + ) + losses = {"loss_cls": loss_cls, "loss_box_reg": loss_box_reg} + + # mask prediction + if self.mask_on: + loss_mask = 0 + for lvl in range(self.num_levels): + cur_level_factor = 2 ** lvl if self.bipyramid_on else 1 + for anc in range(self.num_anchors): + cur_gt_mask_inds = gt_mask_inds[lvl][anc] + if cur_gt_mask_inds is None: + loss_mask += pred_masks[lvl][anc][0, 0, 0, 0] * 0 + else: + cur_mask_size = self.mask_sizes[anc] * cur_level_factor + # TODO maybe there are numerical issues when mask sizes are large + cur_size_divider = torch.tensor( + self.mask_loss_weight / (cur_mask_size ** 2), + dtype=torch.float32, + device=self.device, + ) + + cur_pred_masks = pred_masks[lvl][anc][ + cur_gt_mask_inds[:, 0], # N + :, # V x U + cur_gt_mask_inds[:, 1], # H + cur_gt_mask_inds[:, 2], # W + ] + + loss_mask += F.binary_cross_entropy_with_logits( + cur_pred_masks.view(-1, cur_mask_size, cur_mask_size), # V, U + gt_masks[lvl][anc].to(dtype=torch.float32), + reduction="sum", + weight=cur_size_divider, + pos_weight=self.mask_pos_weight, + ) + losses["loss_mask"] = loss_mask / loss_normalizer + return losses + + @torch.no_grad() + def 
get_ground_truth(self, anchors, unit_lengths, indexes, targets): + """ + Args: + anchors (list[list[Boxes]]): a list of N=#image elements. Each is a + list of #feature level Boxes. The Boxes contains anchors of + this image on the specific feature level. + unit_lengths (list[list[Tensor]]): a list of N=#image elements. Each is a + list of #feature level Tensor. The tensor contains unit lengths for anchors of + this image on the specific feature level. + indexes (list[list[Tensor]]): a list of N=#image elements. Each is a + list of #feature level Tensor. The tensor contains the 5D index of + each anchor, the second dimension means (L, N, H, W, A), where L + is level, I is image, H is height, W is width, and A is anchor. + targets (list[Instances]): a list of N `Instances`s. The i-th + `Instances` contains the ground-truth per-instance annotations + for the i-th input image. Specify `targets` during training only. + + Returns: + gt_class_info (Tensor, Tensor): A pair of two tensors for classification. + The first one is an integer tensor of shape (R, #classes) storing ground-truth + labels for each anchor. R is the total number of anchors in the batch. + The second one is an integer tensor of shape (R,), to indicate which + anchors are valid for loss computation, which anchors are not. + gt_delta_info (Tensor, Tensor): A pair of two tensors for boxes. + The first one, of shape (F, 4). F=#foreground anchors. + The last dimension represents ground-truth box2box transform + targets (dx, dy, dw, dh) that map each anchor to its matched ground-truth box. + Only foreground anchors have values in this tensor. Could be `None` if F=0. + The second one, of shape (R,), is an integer tensor indicating which anchors + are foreground ones used for box regression. Could be `None` if F=0. + gt_mask_info (list[list[Tensor]], list[list[Tensor]]): A pair of two lists for masks. + The first one is a list of P=#feature level elements. Each is a + list of A=#anchor tensors. Each tensor contains the ground truth + masks of the same size and for the same feature level. Could be `None`. + The second one is a list of P=#feature level elements. Each is a + list of A=#anchor tensors. Each tensor contains the location of the ground truth + masks of the same size and for the same feature level. The second dimension means + (N, H, W), where N is image, H is height, and W is width. Could be `None`. + num_fg (int): F=#foreground anchors, used later for loss normalization. 
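A brief sketch of the box regression targets described above (box values are made up; the weights mirror `MODEL.TENSOR_MASK.BBOX_REG_WEIGHTS`): `get_deltas` produces the (dx, dy, dw, dh) targets that map a matched anchor onto its ground-truth box, and `apply_deltas` inverts them.

```python
import torch
from detectron2.modeling.box_regression import Box2BoxTransform

transform = Box2BoxTransform(weights=(1.5, 1.5, 0.75, 0.75))
anchors = torch.tensor([[10., 10., 50., 50.]])       # XYXY
gt_boxes = torch.tensor([[12., 8., 60., 48.]])

deltas = transform.get_deltas(anchors, gt_boxes)     # regression targets (dx, dy, dw, dh)
recovered = transform.apply_deltas(deltas, anchors)  # approximately equal to gt_boxes
print(recovered)
```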
+ """ + gt_classes = [] + gt_deltas = [] + gt_masks = [[[] for _ in range(self.num_anchors)] for _ in range(self.num_levels)] + gt_mask_inds = [[[] for _ in range(self.num_anchors)] for _ in range(self.num_levels)] + + anchors = [Boxes.cat(anchors_i) for anchors_i in anchors] + unit_lengths = [cat(unit_lengths_i) for unit_lengths_i in unit_lengths] + indexes = [cat(indexes_i) for indexes_i in indexes] + + num_fg = 0 + for i, (anchors_im, unit_lengths_im, indexes_im, targets_im) in enumerate( + zip(anchors, unit_lengths, indexes, targets) + ): + # Initialize all + gt_classes_i = torch.full_like( + unit_lengths_im, self.num_classes, dtype=torch.int64, device=self.device + ) + # Ground truth classes + has_gt = len(targets_im) > 0 + if has_gt: + # Compute the pairwise matrix + gt_matched_inds, anchor_labels = _assignment_rule( + targets_im.gt_boxes, anchors_im, unit_lengths_im, self.min_anchor_size + ) + # Find the foreground instances + fg_inds = anchor_labels == 1 + fg_anchors = anchors_im[fg_inds] + num_fg += len(fg_anchors) + # Find the ground truths for foreground instances + gt_fg_matched_inds = gt_matched_inds[fg_inds] + # Assign labels for foreground instances + gt_classes_i[fg_inds] = targets_im.gt_classes[gt_fg_matched_inds] + # Anchors with label -1 are ignored, others are left as negative + gt_classes_i[anchor_labels == -1] = -1 + + # Boxes + # Ground truth box regression, only for foregrounds + matched_gt_boxes = targets_im[gt_fg_matched_inds].gt_boxes + # Compute box regression offsets for foregrounds only + gt_deltas_i = self.box2box_transform.get_deltas( + fg_anchors.tensor, matched_gt_boxes.tensor + ) + gt_deltas.append(gt_deltas_i) + + # Masks + if self.mask_on: + # Compute masks for each level and each anchor + matched_indexes = indexes_im[fg_inds, :] + for lvl in range(self.num_levels): + ids_lvl = matched_indexes[:, 0] == lvl + if torch.any(ids_lvl): + cur_level_factor = 2 ** lvl if self.bipyramid_on else 1 + for anc in range(self.num_anchors): + ids_lvl_anchor = ids_lvl & (matched_indexes[:, 4] == anc) + if torch.any(ids_lvl_anchor): + gt_masks[lvl][anc].append( + targets_im[ + gt_fg_matched_inds[ids_lvl_anchor] + ].gt_masks.crop_and_resize( + fg_anchors[ids_lvl_anchor].tensor, + self.mask_sizes[anc] * cur_level_factor, + ) + ) + # Select (N, H, W) dimensions + gt_mask_inds_lvl_anc = matched_indexes[ids_lvl_anchor, 1:4] + # Set the image index to the current image + gt_mask_inds_lvl_anc[:, 0] = i + gt_mask_inds[lvl][anc].append(gt_mask_inds_lvl_anc) + gt_classes.append(gt_classes_i) + + # Classes and boxes + gt_classes = cat(gt_classes) + gt_valid_inds = gt_classes >= 0 + gt_fg_inds = gt_valid_inds & (gt_classes < self.num_classes) + gt_classes_target = torch.zeros( + (gt_classes.shape[0], self.num_classes), dtype=torch.float32, device=self.device + ) + gt_classes_target[gt_fg_inds, gt_classes[gt_fg_inds]] = 1 + gt_deltas = cat(gt_deltas) if gt_deltas else None + + # Masks + gt_masks = [[cat(mla) if mla else None for mla in ml] for ml in gt_masks] + gt_mask_inds = [[cat(ila) if ila else None for ila in il] for il in gt_mask_inds] + return ( + (gt_classes_target, gt_valid_inds), + (gt_deltas, gt_fg_inds), + (gt_masks, gt_mask_inds), + num_fg, + ) + + def inference(self, pred_logits, pred_deltas, pred_masks, anchors, indexes, images): + """ + Arguments: + pred_logits, pred_deltas, pred_masks: Same as the output of: + meth:`TensorMaskHead.forward` + anchors, indexes: Same as the input of meth:`TensorMask.get_ground_truth` + images (ImageList): the input images + + Returns: + 
results (List[Instances]): a list of #images elements. + """ + assert len(anchors) == len(images) + results = [] + + pred_logits = [permute_to_N_HWA_K(x, self.num_classes) for x in pred_logits] + pred_deltas = [permute_to_N_HWA_K(x, 4) for x in pred_deltas] + + pred_logits = cat(pred_logits, dim=1) + pred_deltas = cat(pred_deltas, dim=1) + + for img_idx, (anchors_im, indexes_im) in enumerate(zip(anchors, indexes)): + # Get the size of the current image + image_size = images.image_sizes[img_idx] + + logits_im = pred_logits[img_idx] + deltas_im = pred_deltas[img_idx] + + if self.mask_on: + masks_im = [[mla[img_idx] for mla in ml] for ml in pred_masks] + else: + masks_im = [None] * self.num_levels + results_im = self.inference_single_image( + logits_im, + deltas_im, + masks_im, + Boxes.cat(anchors_im), + cat(indexes_im), + tuple(image_size), + ) + results.append(results_im) + return results + + def inference_single_image( + self, pred_logits, pred_deltas, pred_masks, anchors, indexes, image_size + ): + """ + Single-image inference. Return bounding-box detection results by thresholding + on scores and applying non-maximum suppression (NMS). + + Arguments: + pred_logits (list[Tensor]): list of #feature levels. Each entry contains + tensor of size (AxHxW, K) + pred_deltas (list[Tensor]): Same shape as 'pred_logits' except that K becomes 4. + pred_masks (list[list[Tensor]]): List of #feature levels, each is a list of #anchors. + Each entry contains tensor of size (M_i*M_i, H, W). `None` if mask_on=False. + anchors (list[Boxes]): list of #feature levels. Each entry contains + a Boxes object, which contains all the anchors for that + image in that feature level. + image_size (tuple(H, W)): a tuple of the image height and width. + + Returns: + Same as `inference`, but for only one image. + """ + pred_logits = pred_logits.flatten().sigmoid_() + # We get top locations across all levels to accelerate the inference speed, + # which does not seem to affect the accuracy. 
+ # First select values above the threshold + logits_top_idxs = torch.where(pred_logits > self.score_threshold)[0] + # Then get the top values + num_topk = min(self.topk_candidates, logits_top_idxs.shape[0]) + pred_prob, topk_idxs = pred_logits[logits_top_idxs].sort(descending=True) + # Keep top k scoring values + pred_prob = pred_prob[:num_topk] + # Keep top k values + top_idxs = logits_top_idxs[topk_idxs[:num_topk]] + + # class index + cls_idxs = top_idxs % self.num_classes + # HWA index + top_idxs //= self.num_classes + # predict boxes + pred_boxes = self.box2box_transform.apply_deltas( + pred_deltas[top_idxs], anchors[top_idxs].tensor + ) + # apply nms + keep = batched_nms(pred_boxes, pred_prob, cls_idxs, self.nms_threshold) + # pick the top ones + keep = keep[: self.detections_im] + + results = Instances(image_size) + results.pred_boxes = Boxes(pred_boxes[keep]) + results.scores = pred_prob[keep] + results.pred_classes = cls_idxs[keep] + + # deal with masks + result_masks, result_anchors = [], None + if self.mask_on: + # index and anchors, useful for masks + top_indexes = indexes[top_idxs] + top_anchors = anchors[top_idxs] + result_indexes = top_indexes[keep] + result_anchors = top_anchors[keep] + # Get masks and do sigmoid + for lvl, _, h, w, anc in result_indexes.tolist(): + cur_size = self.mask_sizes[anc] * (2 ** lvl if self.bipyramid_on else 1) + result_masks.append( + torch.sigmoid(pred_masks[lvl][anc][:, h, w].view(1, cur_size, cur_size)) + ) + + return results, (result_masks, result_anchors) + + def preprocess_image(self, batched_inputs): + """ + Normalize, pad and batch the input images. + """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + return images + + +class TensorMaskHead(nn.Module): + def __init__(self, cfg, num_levels, num_anchors, mask_sizes, input_shape: List[ShapeSpec]): + """ + TensorMask head. 
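For intuition, a toy version of the score-threshold / top-k / class-aware NMS flow used in `inference_single_image` above (all numbers are made up):

```python
import torch
from detectron2.layers import batched_nms

scores = torch.tensor([0.90, 0.80, 0.75, 0.30])
boxes = torch.tensor([[0., 0., 10., 10.],
                      [1., 1., 11., 11.],
                      [50., 50., 60., 60.],
                      [0., 0., 9., 9.]])
classes = torch.tensor([0, 0, 1, 0])

keep_idx = torch.where(scores > 0.5)[0]                        # score threshold
order = batched_nms(boxes[keep_idx], scores[keep_idx], classes[keep_idx], 0.5)
final = keep_idx[order][:100]                                  # cap at detections-per-image
print(final)  # the two near-duplicate class-0 boxes collapse to one detection
```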
+ """ + super().__init__() + # fmt: off + self.in_features = cfg.MODEL.TENSOR_MASK.IN_FEATURES + in_channels = input_shape[0].channels + num_classes = cfg.MODEL.TENSOR_MASK.NUM_CLASSES + cls_channels = cfg.MODEL.TENSOR_MASK.CLS_CHANNELS + num_convs = cfg.MODEL.TENSOR_MASK.NUM_CONVS + # box parameters + bbox_channels = cfg.MODEL.TENSOR_MASK.BBOX_CHANNELS + # mask parameters + self.mask_on = cfg.MODEL.MASK_ON + self.mask_sizes = mask_sizes + mask_channels = cfg.MODEL.TENSOR_MASK.MASK_CHANNELS + self.align_on = cfg.MODEL.TENSOR_MASK.ALIGNED_ON + self.bipyramid_on = cfg.MODEL.TENSOR_MASK.BIPYRAMID_ON + # fmt: on + + # class subnet + cls_subnet = [] + cur_channels = in_channels + for _ in range(num_convs): + cls_subnet.append( + nn.Conv2d(cur_channels, cls_channels, kernel_size=3, stride=1, padding=1) + ) + cur_channels = cls_channels + cls_subnet.append(nn.ReLU()) + + self.cls_subnet = nn.Sequential(*cls_subnet) + self.cls_score = nn.Conv2d( + cur_channels, num_anchors * num_classes, kernel_size=3, stride=1, padding=1 + ) + modules_list = [self.cls_subnet, self.cls_score] + + # box subnet + bbox_subnet = [] + cur_channels = in_channels + for _ in range(num_convs): + bbox_subnet.append( + nn.Conv2d(cur_channels, bbox_channels, kernel_size=3, stride=1, padding=1) + ) + cur_channels = bbox_channels + bbox_subnet.append(nn.ReLU()) + + self.bbox_subnet = nn.Sequential(*bbox_subnet) + self.bbox_pred = nn.Conv2d( + cur_channels, num_anchors * 4, kernel_size=3, stride=1, padding=1 + ) + modules_list.extend([self.bbox_subnet, self.bbox_pred]) + + # mask subnet + if self.mask_on: + mask_subnet = [] + cur_channels = in_channels + for _ in range(num_convs): + mask_subnet.append( + nn.Conv2d(cur_channels, mask_channels, kernel_size=3, stride=1, padding=1) + ) + cur_channels = mask_channels + mask_subnet.append(nn.ReLU()) + + self.mask_subnet = nn.Sequential(*mask_subnet) + modules_list.append(self.mask_subnet) + for mask_size in self.mask_sizes: + cur_mask_module = "mask_pred_%02d" % mask_size + self.add_module( + cur_mask_module, + nn.Conv2d( + cur_channels, mask_size * mask_size, kernel_size=1, stride=1, padding=0 + ), + ) + modules_list.append(getattr(self, cur_mask_module)) + if self.align_on: + if self.bipyramid_on: + for lvl in range(num_levels): + cur_mask_module = "align2nat_%02d" % lvl + lambda_val = 2 ** lvl + setattr(self, cur_mask_module, SwapAlign2Nat(lambda_val)) + # Also the fusing layer, stay at the same channel size + mask_fuse = [ + nn.Conv2d(cur_channels, cur_channels, kernel_size=3, stride=1, padding=1), + nn.ReLU(), + ] + self.mask_fuse = nn.Sequential(*mask_fuse) + modules_list.append(self.mask_fuse) + else: + self.align2nat = SwapAlign2Nat(1) + + # Initialization + for modules in modules_list: + for layer in modules.modules(): + if isinstance(layer, nn.Conv2d): + torch.nn.init.normal_(layer.weight, mean=0, std=0.01) + torch.nn.init.constant_(layer.bias, 0) + + # Use prior in model initialization to improve stability + bias_value = -(math.log((1 - 0.01) / 0.01)) + torch.nn.init.constant_(self.cls_score.bias, bias_value) + + def forward(self, features): + """ + Arguments: + features (list[Tensor]): FPN feature map tensors in high to low resolution. + Each tensor in the list correspond to different feature levels. + + Returns: + pred_logits (list[Tensor]): #lvl tensors, each has shape (N, AxK, Hi, Wi). + The tensor predicts the classification probability + at each spatial position for each of the A anchors and K object + classes. 
+ pred_deltas (list[Tensor]): #lvl tensors, each has shape (N, Ax4, Hi, Wi). + The tensor predicts 4-vector (dx,dy,dw,dh) box + regression values for every anchor. These values are the + relative offset between the anchor and the ground truth box. + pred_masks (list(list[Tensor])): #lvl list of tensors, each is a list of + A tensors of shape (N, M_{i,a}, Hi, Wi). + The tensor predicts a dense set of M_ixM_i masks at every location. + """ + pred_logits = [self.cls_score(self.cls_subnet(x)) for x in features] + pred_deltas = [self.bbox_pred(self.bbox_subnet(x)) for x in features] + + pred_masks = None + if self.mask_on: + mask_feats = [self.mask_subnet(x) for x in features] + + if self.bipyramid_on: + mask_feat_high_res = mask_feats[0] + H, W = mask_feat_high_res.shape[-2:] + mask_feats_up = [] + for lvl, mask_feat in enumerate(mask_feats): + lambda_val = 2.0 ** lvl + mask_feat_up = mask_feat + if lvl > 0: + mask_feat_up = F.interpolate( + mask_feat, scale_factor=lambda_val, mode="bilinear", align_corners=False + ) + mask_feats_up.append( + self.mask_fuse(mask_feat_up[:, :, :H, :W] + mask_feat_high_res) + ) + mask_feats = mask_feats_up + + pred_masks = [] + for lvl, mask_feat in enumerate(mask_feats): + cur_masks = [] + for mask_size in self.mask_sizes: + cur_mask_module = getattr(self, "mask_pred_%02d" % mask_size) + cur_mask = cur_mask_module(mask_feat) + if self.align_on: + if self.bipyramid_on: + cur_mask_module = getattr(self, "align2nat_%02d" % lvl) + cur_mask = cur_mask_module(cur_mask) + else: + cur_mask = self.align2nat(cur_mask) + cur_masks.append(cur_mask) + pred_masks.append(cur_masks) + return pred_logits, pred_deltas, pred_masks diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/config.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/config.py new file mode 100644 index 0000000000000000000000000000000000000000..44479f211811bd4060c6afef9ed86791b0dcd0d4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/config.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from detectron2.config import CfgNode as CN + + +def add_tensormask_config(cfg): + """ + Add config for TensorMask. + """ + cfg.MODEL.TENSOR_MASK = CN() + + # Anchor parameters + cfg.MODEL.TENSOR_MASK.IN_FEATURES = ["p2", "p3", "p4", "p5", "p6", "p7"] + + # Convolutions to use in the towers + cfg.MODEL.TENSOR_MASK.NUM_CONVS = 4 + + # Number of foreground classes. 
+ cfg.MODEL.TENSOR_MASK.NUM_CLASSES = 80 + # Channel size for the classification tower + cfg.MODEL.TENSOR_MASK.CLS_CHANNELS = 256 + + cfg.MODEL.TENSOR_MASK.SCORE_THRESH_TEST = 0.05 + # Only the top (1000 * #levels) candidate boxes across all levels are + # considered jointly during test (to improve speed) + cfg.MODEL.TENSOR_MASK.TOPK_CANDIDATES_TEST = 6000 + cfg.MODEL.TENSOR_MASK.NMS_THRESH_TEST = 0.5 + + # Box parameters + # Channel size for the box tower + cfg.MODEL.TENSOR_MASK.BBOX_CHANNELS = 128 + # Weights on (dx, dy, dw, dh) + cfg.MODEL.TENSOR_MASK.BBOX_REG_WEIGHTS = (1.5, 1.5, 0.75, 0.75) + + # Loss parameters + cfg.MODEL.TENSOR_MASK.FOCAL_LOSS_GAMMA = 3.0 + cfg.MODEL.TENSOR_MASK.FOCAL_LOSS_ALPHA = 0.3 + + # Mask parameters + # Channel size for the mask tower + cfg.MODEL.TENSOR_MASK.MASK_CHANNELS = 128 + # Mask loss weight + cfg.MODEL.TENSOR_MASK.MASK_LOSS_WEIGHT = 2.0 + # weight on positive pixels within the mask + cfg.MODEL.TENSOR_MASK.POSITIVE_WEIGHT = 1.5 + # Whether to predict in the aligned representation + cfg.MODEL.TENSOR_MASK.ALIGNED_ON = False + # Whether to use the bipyramid architecture + cfg.MODEL.TENSOR_MASK.BIPYRAMID_ON = False diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cbbac429a69ce7cb17872e27b868f5603de5dc64 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .swap_align2nat import SwapAlign2Nat, swap_align2nat + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/SwapAlign2Nat/SwapAlign2Nat.h b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/SwapAlign2Nat/SwapAlign2Nat.h new file mode 100644 index 0000000000000000000000000000000000000000..2ec037391f1c5a40e69190bbdb50f71501d54825 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/SwapAlign2Nat/SwapAlign2Nat.h @@ -0,0 +1,54 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +#pragma once +#include + +namespace tensormask { + +#ifdef WITH_CUDA +at::Tensor SwapAlign2Nat_forward_cuda( + const at::Tensor& X, + const int lambda_val, + const float pad_val); + +at::Tensor SwapAlign2Nat_backward_cuda( + const at::Tensor& gY, + const int lambda_val, + const int batch_size, + const int channel, + const int height, + const int width); +#endif + +inline at::Tensor SwapAlign2Nat_forward( + const at::Tensor& X, + const int lambda_val, + const float pad_val) { + if (X.type().is_cuda()) { +#ifdef WITH_CUDA + return SwapAlign2Nat_forward_cuda(X, lambda_val, pad_val); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + AT_ERROR("Not implemented on the CPU"); +} + +inline at::Tensor SwapAlign2Nat_backward( + const at::Tensor& gY, + const int lambda_val, + const int batch_size, + const int channel, + const int height, + const int width) { + if (gY.type().is_cuda()) { +#ifdef WITH_CUDA + return SwapAlign2Nat_backward_cuda( + gY, lambda_val, batch_size, channel, height, width); +#else + AT_ERROR("Not compiled with GPU support"); +#endif + } + AT_ERROR("Not implemented on the CPU"); +} + +} // namespace tensormask diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/SwapAlign2Nat/SwapAlign2Nat_cuda.cu b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/SwapAlign2Nat/SwapAlign2Nat_cuda.cu new file mode 100644 index 0000000000000000000000000000000000000000..06de4a4d046523be9959dee73dfc1c2c20852ce1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/SwapAlign2Nat/SwapAlign2Nat_cuda.cu @@ -0,0 +1,526 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include +#include +#include +#include + +// TODO make it in a common file +#define CUDA_1D_KERNEL_LOOP(i, n) \ + for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n; \ + i += blockDim.x * gridDim.x) + +template +__device__ inline T get_pixel_val( + const T* tensor, + const int idx, + const int H, + const int W, + const int y, + const int x, + const int V, + const int U, + const int v, + const int u, + const T pad_val) { + if ((y < 0) || (y >= H) || (x < 0) || (x >= W) || (v < 0) || (v >= V) || + (u < 0) || (u >= U)) { + return pad_val; + } else { + return tensor[(((idx * V + v) * U + u) * H + y) * W + x]; + } +} + +template +__device__ inline void add_pixel_val( + T* tensor, + const T val, + const int idx, + const int H, + const int W, + const int y, + const int x, + const int V, + const int U, + const int v, + const int u) { + if ((val == 0.) 
|| (y < 0) || (y >= H) || (x < 0) || (x >= W) || (v < 0) || + (v >= V) || (u < 0) || (u >= U)) { + return; + } else { + atomicAdd(tensor + ((((idx * V + v) * U + u) * H + y) * W + x), val); + } +} + +template +__global__ void SwapAlign2NatForwardFeat( + const int nthreads, + const T* bottom_data, + const int Vout, + const int Uout, + const float hVout, + const float hUout, + const int Vin, + const int Uin, + const float lambda, + const int Hin, + const int Win, + const int Hout, + const int Wout, + const T pad_val, + T* top_data) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + int idx = index; + const int x = idx % Wout; + idx /= Wout; + const int y = idx % Hout; + idx /= Hout; + const int u = idx % Uout; + idx /= Uout; + const int v = idx % Vout; + idx /= Vout; + + const float ox = x * lambda + u - hUout + 0.5; + const int xf = static_cast(floor(ox)); + const int xc = static_cast(ceil(ox)); + const float xwc = ox - xf; + const float xwf = 1. - xwc; + + const float oy = y * lambda + v - hVout + 0.5; + const int yf = static_cast(floor(oy)); + const int yc = static_cast(ceil(oy)); + const float ywc = oy - yf; + const float ywf = 1. - ywc; + + const float ou = (u + 0.5) / lambda - 0.5; + const int uf = static_cast(floor(ou)); + const int uc = static_cast(ceil(ou)); + const float uwc = ou - uf; + const float uwf = 1. - uwc; + + const float ov = (v + 0.5) / lambda - 0.5; + const int vf = static_cast(floor(ov)); + const int vc = static_cast(ceil(ov)); + const float vwc = ov - vf; + const float vwf = 1. - vwc; + + T val = ywf * xwf * vwf * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xf, Vin, Uin, vf, uf, pad_val) + + ywf * xwf * vwf * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xf, Vin, Uin, vf, uc, pad_val) + + ywf * xwf * vwc * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xf, Vin, Uin, vc, uf, pad_val) + + ywf * xwf * vwc * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xf, Vin, Uin, vc, uc, pad_val) + + ywf * xwc * vwf * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xc, Vin, Uin, vf, uf, pad_val) + + ywf * xwc * vwf * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xc, Vin, Uin, vf, uc, pad_val) + + ywf * xwc * vwc * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xc, Vin, Uin, vc, uf, pad_val) + + ywf * xwc * vwc * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yf, xc, Vin, Uin, vc, uc, pad_val) + + ywc * xwf * vwf * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xf, Vin, Uin, vf, uf, pad_val) + + ywc * xwf * vwf * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xf, Vin, Uin, vf, uc, pad_val) + + ywc * xwf * vwc * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xf, Vin, Uin, vc, uf, pad_val) + + ywc * xwf * vwc * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xf, Vin, Uin, vc, uc, pad_val) + + ywc * xwc * vwf * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xc, Vin, Uin, vf, uf, pad_val) + + ywc * xwc * vwf * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xc, Vin, Uin, vf, uc, pad_val) + + ywc * xwc * vwc * uwf * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xc, Vin, Uin, vc, uf, pad_val) + + ywc * xwc * vwc * uwc * + get_pixel_val( + bottom_data, idx, Hin, Win, yc, xc, Vin, Uin, vc, uc, pad_val); + + top_data[index] = val; + } +} + +template +__global__ void SwapAlign2NatBackwardFeat( + const int nthreads, + const T* top_diff, + const int Vout, + const int Uout, + const float hVout, + const float hUout, + const int Vin, + const int Uin, + const float 
lambda, + const int Hin, + const int Win, + const int Hout, + const int Wout, + T* bottom_diff) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + int idx = index; + const int x = idx % Wout; + idx /= Wout; + const int y = idx % Hout; + idx /= Hout; + const int u = idx % Uout; + idx /= Uout; + const int v = idx % Vout; + idx /= Vout; + + const float ox = x * lambda + u - hUout + 0.5; + const int xf = static_cast(floor(ox)); + const int xc = static_cast(ceil(ox)); + const float xwc = ox - xf; + const float xwf = 1. - xwc; + + const float oy = y * lambda + v - hVout + 0.5; + const int yf = static_cast(floor(oy)); + const int yc = static_cast(ceil(oy)); + const float ywc = oy - yf; + const float ywf = 1. - ywc; + + const float ou = (u + 0.5) / lambda - 0.5; + const int uf = static_cast(floor(ou)); + const int uc = static_cast(ceil(ou)); + const float uwc = ou - uf; + const float uwf = 1. - uwc; + + const float ov = (v + 0.5) / lambda - 0.5; + const int vf = static_cast(floor(ov)); + const int vc = static_cast(ceil(ov)); + const float vwc = ov - vf; + const float vwf = 1. - vwc; + + const T grad = top_diff[index]; + + add_pixel_val( + bottom_diff, + ywf * xwf * vwf * uwf * grad, + idx, + Hin, + Win, + yf, + xf, + Vin, + Uin, + vf, + uf); + add_pixel_val( + bottom_diff, + ywf * xwf * vwf * uwc * grad, + idx, + Hin, + Win, + yf, + xf, + Vin, + Uin, + vf, + uc); + add_pixel_val( + bottom_diff, + ywf * xwf * vwc * uwf * grad, + idx, + Hin, + Win, + yf, + xf, + Vin, + Uin, + vc, + uf); + add_pixel_val( + bottom_diff, + ywf * xwf * vwc * uwc * grad, + idx, + Hin, + Win, + yf, + xf, + Vin, + Uin, + vc, + uc); + add_pixel_val( + bottom_diff, + ywf * xwc * vwf * uwf * grad, + idx, + Hin, + Win, + yf, + xc, + Vin, + Uin, + vf, + uf); + add_pixel_val( + bottom_diff, + ywf * xwc * vwf * uwc * grad, + idx, + Hin, + Win, + yf, + xc, + Vin, + Uin, + vf, + uc); + add_pixel_val( + bottom_diff, + ywf * xwc * vwc * uwf * grad, + idx, + Hin, + Win, + yf, + xc, + Vin, + Uin, + vc, + uf); + add_pixel_val( + bottom_diff, + ywf * xwc * vwc * uwc * grad, + idx, + Hin, + Win, + yf, + xc, + Vin, + Uin, + vc, + uc); + add_pixel_val( + bottom_diff, + ywc * xwf * vwf * uwf * grad, + idx, + Hin, + Win, + yc, + xf, + Vin, + Uin, + vf, + uf); + add_pixel_val( + bottom_diff, + ywc * xwf * vwf * uwc * grad, + idx, + Hin, + Win, + yc, + xf, + Vin, + Uin, + vf, + uc); + add_pixel_val( + bottom_diff, + ywc * xwf * vwc * uwf * grad, + idx, + Hin, + Win, + yc, + xf, + Vin, + Uin, + vc, + uf); + add_pixel_val( + bottom_diff, + ywc * xwf * vwc * uwc * grad, + idx, + Hin, + Win, + yc, + xf, + Vin, + Uin, + vc, + uc); + add_pixel_val( + bottom_diff, + ywc * xwc * vwf * uwf * grad, + idx, + Hin, + Win, + yc, + xc, + Vin, + Uin, + vf, + uf); + add_pixel_val( + bottom_diff, + ywc * xwc * vwf * uwc * grad, + idx, + Hin, + Win, + yc, + xc, + Vin, + Uin, + vf, + uc); + add_pixel_val( + bottom_diff, + ywc * xwc * vwc * uwf * grad, + idx, + Hin, + Win, + yc, + xc, + Vin, + Uin, + vc, + uf); + add_pixel_val( + bottom_diff, + ywc * xwc * vwc * uwc * grad, + idx, + Hin, + Win, + yc, + xc, + Vin, + Uin, + vc, + uc); + } +} + +namespace tensormask { + +at::Tensor SwapAlign2Nat_forward_cuda( + const at::Tensor& X, + const int lambda_val, + const float pad_val) { + AT_ASSERTM(X.device().is_cuda(), "input must be a CUDA tensor"); + AT_ASSERTM(X.ndimension() == 4, "input must be a 4D tensor"); + AT_ASSERTM(lambda_val >= 1, "lambda should be greater or equal to 1"); + const int N = X.size(0); + const int C = X.size(1); + const int Vin = 
static_cast(sqrt(static_cast(C))); + const int Uin = C / Vin; + AT_ASSERTM( + C == Vin * Uin && Vin == Uin, "#channels should be a square number"); + const int Vout = lambda_val * Vin; + const int Uout = lambda_val * Uin; + const int Hin = X.size(2); + const int Win = X.size(3); + const float lambda = static_cast(lambda_val); + const int Hout = static_cast(ceil(Hin / lambda)); + const int Wout = static_cast(ceil(Win / lambda)); + const float hVout = Vout / 2.; + const float hUout = Uout / 2.; + + at::cuda::CUDAGuard device_guard(X.device()); + + at::Tensor Y = at::empty({N, Vout * Uout, Hout, Wout}, X.options()); + + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + dim3 grid(std::min(at::cuda::ATenCeilDiv(Y.numel(), 512L), 4096L)); + dim3 block(512); + + if (Y.numel() == 0) { + AT_CUDA_CHECK(cudaGetLastError()); + return Y; + } + + auto X_ = X.contiguous(); + AT_DISPATCH_FLOATING_TYPES(X.scalar_type(), "SwapAlign2Nat_forward", [&] { + SwapAlign2NatForwardFeat<<>>( + Y.numel(), + X_.data_ptr(), + Vout, + Uout, + hVout, + hUout, + Vin, + Uin, + lambda, + Hin, + Win, + Hout, + Wout, + pad_val, + Y.data_ptr()); + }); + cudaDeviceSynchronize(); + AT_CUDA_CHECK(cudaGetLastError()); + return Y; +} + +at::Tensor SwapAlign2Nat_backward_cuda( + const at::Tensor& gY, + const int lambda_val, + const int batch_size, + const int channel, + const int height, + const int width) { + AT_ASSERTM(gY.device().is_cuda(), "input gradient must be a CUDA tensor"); + AT_ASSERTM(gY.ndimension() == 4, "input gradient must be a 4D tensor"); + AT_ASSERTM(lambda_val >= 1, "lambda should be greater or equal to 1"); + const int Vin = static_cast(sqrt(static_cast(channel))); + const int Uin = channel / Vin; + const int Vout = lambda_val * Vin; + const int Uout = lambda_val * Uin; + const float hVout = Vout / 2.; + const float hUout = Uout / 2.; + const int Hout = gY.size(2); + const int Wout = gY.size(3); + + at::cuda::CUDAGuard device_guard(gY.device()); + + at::Tensor gX = at::zeros({batch_size, channel, height, width}, gY.options()); + + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + dim3 grid(std::min(at::cuda::ATenCeilDiv(gY.numel(), 512L), 4096L)); + dim3 block(512); + + // handle possibly empty gradients + if (gY.numel() == 0) { + AT_CUDA_CHECK(cudaGetLastError()); + return gX; + } + + auto gY_ = gY.contiguous(); + AT_DISPATCH_FLOATING_TYPES(gY.scalar_type(), "SwapAlign2Nat_backward", [&] { + SwapAlign2NatBackwardFeat<<>>( + gY.numel(), + gY_.data_ptr(), + Vout, + Uout, + hVout, + hUout, + Vin, + Uin, + static_cast(lambda_val), + height, + width, + Hout, + Wout, + gX.data_ptr()); + }); + AT_CUDA_CHECK(cudaGetLastError()); + return gX; +} + +} // namespace tensormask diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/vision.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/vision.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ad8e472c2cfc7c10e00cd6b00fc22c0dd9384dd1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/csrc/vision.cpp @@ -0,0 +1,19 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +#include +#include "SwapAlign2Nat/SwapAlign2Nat.h" + +namespace tensormask { + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def( + "swap_align2nat_forward", + &SwapAlign2Nat_forward, + "SwapAlign2Nat_forward"); + m.def( + "swap_align2nat_backward", + &SwapAlign2Nat_backward, + "SwapAlign2Nat_backward"); +} + +} // namespace tensormask diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/swap_align2nat.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/swap_align2nat.py new file mode 100644 index 0000000000000000000000000000000000000000..a72c98a968577eff2302d75e4cb41620e4ecf582 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tensormask/layers/swap_align2nat.py @@ -0,0 +1,61 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from torch import nn +from torch.autograd import Function +from torch.autograd.function import once_differentiable + +from tensormask import _C + + +class _SwapAlign2Nat(Function): + @staticmethod + def forward(ctx, X, lambda_val, pad_val): + ctx.lambda_val = lambda_val + ctx.input_shape = X.size() + + Y = _C.swap_align2nat_forward(X, lambda_val, pad_val) + return Y + + @staticmethod + @once_differentiable + def backward(ctx, gY): + lambda_val = ctx.lambda_val + bs, ch, h, w = ctx.input_shape + + gX = _C.swap_align2nat_backward(gY, lambda_val, bs, ch, h, w) + + return gX, None, None + + +swap_align2nat = _SwapAlign2Nat.apply + + +class SwapAlign2Nat(nn.Module): + """ + The op `SwapAlign2Nat` described in https://arxiv.org/abs/1903.12174. + Given an input tensor that predicts masks of shape (N, C=VxU, H, W), + apply the op, it will return masks of shape (N, V'xU', H', W') where + the unit lengths of (V, U) and (H, W) are swapped, and the mask representation + is transformed from aligned to natural. + Args: + lambda_val (int): the relative unit length ratio between (V, U) and (H, W), + as we always have larger unit lengths for (V, U) than (H, W), + lambda_val is always >= 1. + pad_val (float): padding value for the values falling outside of the input + tensor, default set to -6 as sigmoid(-6) is ~0, indicating + that is no masks outside of the tensor. + """ + + def __init__(self, lambda_val, pad_val=-6.0): + super(SwapAlign2Nat, self).__init__() + self.lambda_val = lambda_val + self.pad_val = pad_val + + def forward(self, X): + return swap_align2nat(X, self.lambda_val, self.pad_val) + + def __repr__(self): + tmpstr = self.__class__.__name__ + "(" + tmpstr += "lambda_val=" + str(self.lambda_val) + tmpstr += ", pad_val=" + str(self.pad_val) + tmpstr += ")" + return tmpstr diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tests/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..168f9979a4623806934b0ff1102ac166704e7dec --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tests/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tests/test_swap_align2nat.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tests/test_swap_align2nat.py new file mode 100755 index 0000000000000000000000000000000000000000..b3d018ce199ddaa19af25e8304d969e8f59c747a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/tests/test_swap_align2nat.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import unittest +import torch +from torch.autograd import gradcheck + +from tensormask.layers.swap_align2nat import SwapAlign2Nat + + +class SwapAlign2NatTest(unittest.TestCase): + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_swap_align2nat_gradcheck_cuda(self): + dtype = torch.float64 + device = torch.device("cuda") + m = SwapAlign2Nat(2).to(dtype=dtype, device=device) + x = torch.rand(2, 4, 10, 10, dtype=dtype, device=device, requires_grad=True) + + self.assertTrue(gradcheck(m, x), "gradcheck failed for SwapAlign2Nat CUDA") + + def _swap_align2nat(self, tensor, lambda_val): + """ + The basic setup for testing Swap_Align + """ + op = SwapAlign2Nat(lambda_val, pad_val=0.0) + input = torch.from_numpy(tensor[None, :, :, :].astype("float32")) + output = op.forward(input.cuda()).cpu().numpy() + return output[0] + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/train_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/train_net.py new file mode 100755 index 0000000000000000000000000000000000000000..b898fc77b7f52cae6ff398ac5aec73c59ab928ab --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TensorMask/train_net.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +TensorMask Training Script. + +This script is a simplified version of the training script in detectron2/tools. +""" + +import os + +import detectron2.utils.comm as comm +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, launch +from detectron2.evaluation import COCOEvaluator, verify_results + +from tensormask import add_tensormask_config + + +class Trainer(DefaultTrainer): + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_folder=None): + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + return COCOEvaluator(dataset_name, cfg, True, output_folder) + + +def setup(args): + """ + Create configs and perform basic setups. 
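+    Merges the TensorMask-specific options (via add_tensormask_config) into the
+    base detectron2 config before applying the config file and command-line
+    overrides.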
+ """ + cfg = get_cfg() + add_tensormask_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + model = Trainer.build_model(cfg) + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + res = Trainer.test(cfg, model) + if comm.is_main_process(): + verify_results(cfg, res) + return res + + trainer = Trainer(cfg) + trainer.resume_or_load(resume=args.resume) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4b7a90102d008a498e93dff595a09206be5269e7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/README.md @@ -0,0 +1,60 @@ + +# TridentNet in Detectron2 +**Scale-Aware Trident Networks for Object Detection** + +Yanghao Li\*, Yuntao Chen\*, Naiyan Wang, Zhaoxiang Zhang + +[[`TridentNet`](https://github.com/TuSimple/simpledet/tree/master/models/tridentnet)] [[`arXiv`](https://arxiv.org/abs/1901.01892)] [[`BibTeX`](#CitingTridentNet)] + +
+ +
+
+In this repository, we implement TridentNet-Fast in Detectron2.
+Trident Network (TridentNet) aims to generate scale-specific feature maps with a uniform representational power. We construct a parallel multi-branch architecture in which each branch shares the same transformation parameters but with different receptive fields. TridentNet-Fast is a fast approximation of TridentNet that achieves significant improvements without any additional parameters or computational cost.
+
+## Training
+
+To train a model, run
+```bash
+python /path/to/detectron2/projects/TridentNet/train_net.py --config-file <config.yaml>
+```
+
+For example, to launch end-to-end TridentNet training with a ResNet-50 backbone on 8 GPUs,
+one should execute:
+```bash
+python /path/to/detectron2/projects/TridentNet/train_net.py --config-file configs/tridentnet_fast_R_50_C4_1x.yaml --num-gpus 8
+```
+
+## Evaluation
+
+Model evaluation can be done similarly:
+```bash
+python /path/to/detectron2/projects/TridentNet/train_net.py --config-file configs/tridentnet_fast_R_50_C4_1x.yaml --eval-only MODEL.WEIGHTS model.pth
+```
+
+## Results on MS-COCO in Detectron2
+
+|Model|Backbone|Head|lr sched|AP|AP50|AP75|APs|APm|APl|download|
+|-----|--------|----|--------|--|----|----|---|---|---|--------|
+|Faster|R50-C4|C5-512ROI|1X|35.7|56.1|38.0|19.2|40.9|48.7|model \| metrics|
+|TridentFast|R50-C4|C5-128ROI|1X|38.0|58.1|40.8|19.5|42.2|54.6|model \| metrics|
+|Faster|R50-C4|C5-512ROI|3X|38.4|58.7|41.3|20.7|42.7|53.1|model \| metrics|
+|TridentFast|R50-C4|C5-128ROI|3X|40.6|60.8|43.6|23.4|44.7|57.1|model \| metrics|
+|Faster|R101-C4|C5-512ROI|3X|41.1|61.4|44.0|22.2|45.5|55.9|model \| metrics|
+|TridentFast|R101-C4|C5-128ROI|3X|43.6|63.4|47.0|24.3|47.8|60.0|model \| metrics|
+
+
+## Citing TridentNet
+
+If you use TridentNet, please use the following BibTeX entry.
+ +``` +@InProceedings{li2019scale, + title={Scale-Aware Trident Networks for Object Detection}, + author={Li, Yanghao and Chen, Yuntao and Wang, Naiyan and Zhang, Zhaoxiang}, + journal={The International Conference on Computer Vision (ICCV)}, + year={2019} +} +``` + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/Base-TridentNet-Fast-C4.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/Base-TridentNet-Fast-C4.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8c3d80797ba9ae63a5669ccbd74a0d2006fee3b7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/Base-TridentNet-Fast-C4.yaml @@ -0,0 +1,29 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + BACKBONE: + NAME: "build_trident_resnet_backbone" + ROI_HEADS: + NAME: "TridentRes5ROIHeads" + POSITIVE_FRACTION: 0.5 + BATCH_SIZE_PER_IMAGE: 128 + PROPOSAL_APPEND_GT: False + PROPOSAL_GENERATOR: + NAME: "TridentRPN" + RPN: + POST_NMS_TOPK_TRAIN: 500 + TRIDENT: + NUM_BRANCH: 3 + BRANCH_DILATIONS: [1, 2, 3] + TEST_BRANCH_IDX: 1 + TRIDENT_STAGE: "res4" +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_101_C4_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_101_C4_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bc83c2f9e7b7653c8982e657b5f116abe6ad6e1f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_101_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "Base-TridentNet-Fast-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: False + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_50_C4_1x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_50_C4_1x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fda2cb6622d732c0f70d74d567c26182a9a41c44 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_50_C4_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "Base-TridentNet-Fast-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_50_C4_3x.yaml b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_50_C4_3x.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ebf89d03ea043810b02e71ecc2c1711c250e161c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/configs/tridentnet_fast_R_50_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "Base-TridentNet-Fast-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: 
(210000, 250000) + MAX_ITER: 270000 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/train_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/train_net.py new file mode 100755 index 0000000000000000000000000000000000000000..eac2ec5c39e4a3ce2221f354dcea288bffcb1fbb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/train_net.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +""" +TridentNet Training Script. + +This script is a simplified version of the training script in detectron2/tools. +""" + +import os + +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, launch +from detectron2.evaluation import COCOEvaluator + +from tridentnet import add_tridentnet_config + + +class Trainer(DefaultTrainer): + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_folder=None): + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + return COCOEvaluator(dataset_name, cfg, True, output_folder) + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg() + add_tridentnet_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + model = Trainer.build_model(cfg) + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + res = Trainer.test(cfg, model) + return res + + trainer = Trainer(cfg) + trainer.resume_or_load(resume=args.resume) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2fcdeb45a03d3835b3c2498ca8021a11d8cb4758 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from .config import add_tridentnet_config +from .trident_backbone import ( + TridentBottleneckBlock, + build_trident_resnet_backbone, + make_trident_stage, +) +from .trident_rpn import TridentRPN +from .trident_rcnn import TridentRes5ROIHeads, TridentStandardROIHeads diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/config.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/config.py new file mode 100644 index 0000000000000000000000000000000000000000..f33f473cb32633d9ba6582f0406ffe0a929d23c6 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/config.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +from detectron2.config import CfgNode as CN + + +def add_tridentnet_config(cfg): + """ + Add config for tridentnet. + """ + _C = cfg + + _C.MODEL.TRIDENT = CN() + + # Number of branches for TridentNet. + _C.MODEL.TRIDENT.NUM_BRANCH = 3 + # Specify the dilations for each branch. + _C.MODEL.TRIDENT.BRANCH_DILATIONS = [1, 2, 3] + # Specify the stage for applying trident blocks. Default stage is Res4 according to the + # TridentNet paper. + _C.MODEL.TRIDENT.TRIDENT_STAGE = "res4" + # Specify the test branch index TridentNet Fast inference: + # - use -1 to aggregate results of all branches during inference. + # - otherwise, only using specified branch for fast inference. Recommended setting is + # to use the middle branch. + _C.MODEL.TRIDENT.TEST_BRANCH_IDX = 1 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_backbone.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_backbone.py new file mode 100644 index 0000000000000000000000000000000000000000..232dfaf1ca01c0395c0ceea544bfbdee0d45ce1a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_backbone.py @@ -0,0 +1,223 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import fvcore.nn.weight_init as weight_init +import torch +import torch.nn.functional as F + +from detectron2.layers import Conv2d, FrozenBatchNorm2d, get_norm +from detectron2.modeling import BACKBONE_REGISTRY, ResNet, ResNetBlockBase, make_stage +from detectron2.modeling.backbone.resnet import BasicStem, BottleneckBlock, DeformBottleneckBlock + +from .trident_conv import TridentConv + +__all__ = ["TridentBottleneckBlock", "make_trident_stage", "build_trident_resnet_backbone"] + + +class TridentBottleneckBlock(ResNetBlockBase): + def __init__( + self, + in_channels, + out_channels, + *, + bottleneck_channels, + stride=1, + num_groups=1, + norm="BN", + stride_in_1x1=False, + num_branch=3, + dilations=(1, 2, 3), + concat_output=False, + test_branch_idx=-1, + ): + """ + Args: + num_branch (int): the number of branches in TridentNet. + dilations (tuple): the dilations of multiple branches in TridentNet. + concat_output (bool): if concatenate outputs of multiple branches in TridentNet. + Use 'True' for the last trident block. 
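+            test_branch_idx (int): index of the branch used at test time for
+                TridentNet Fast inference; -1 means all branches are used
+                (mirrors the MODEL.TRIDENT.TEST_BRANCH_IDX config option).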
+ """ + super().__init__(in_channels, out_channels, stride) + + assert num_branch == len(dilations) + + self.num_branch = num_branch + self.concat_output = concat_output + self.test_branch_idx = test_branch_idx + + if in_channels != out_channels: + self.shortcut = Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False, + norm=get_norm(norm, out_channels), + ) + else: + self.shortcut = None + + stride_1x1, stride_3x3 = (stride, 1) if stride_in_1x1 else (1, stride) + + self.conv1 = Conv2d( + in_channels, + bottleneck_channels, + kernel_size=1, + stride=stride_1x1, + bias=False, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv2 = TridentConv( + bottleneck_channels, + bottleneck_channels, + kernel_size=3, + stride=stride_3x3, + paddings=dilations, + bias=False, + groups=num_groups, + dilations=dilations, + num_branch=num_branch, + test_branch_idx=test_branch_idx, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv3 = Conv2d( + bottleneck_channels, + out_channels, + kernel_size=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + for layer in [self.conv1, self.conv2, self.conv3, self.shortcut]: + if layer is not None: # shortcut can be None + weight_init.c2_msra_fill(layer) + + def forward(self, x): + num_branch = self.num_branch if self.training or self.test_branch_idx == -1 else 1 + if not isinstance(x, list): + x = [x] * num_branch + out = [self.conv1(b) for b in x] + out = [F.relu_(b) for b in out] + + out = self.conv2(out) + out = [F.relu_(b) for b in out] + + out = [self.conv3(b) for b in out] + + if self.shortcut is not None: + shortcut = [self.shortcut(b) for b in x] + else: + shortcut = x + + out = [out_b + shortcut_b for out_b, shortcut_b in zip(out, shortcut)] + out = [F.relu_(b) for b in out] + if self.concat_output: + out = torch.cat(out) + return out + + +def make_trident_stage(block_class, num_blocks, first_stride, **kwargs): + """ + Create a resnet stage by creating many blocks for TridentNet. + """ + blocks = [] + for i in range(num_blocks - 1): + blocks.append(block_class(stride=first_stride if i == 0 else 1, **kwargs)) + kwargs["in_channels"] = kwargs["out_channels"] + blocks.append(block_class(stride=1, concat_output=True, **kwargs)) + return blocks + + +@BACKBONE_REGISTRY.register() +def build_trident_resnet_backbone(cfg, input_shape): + """ + Create a ResNet instance from config for TridentNet. + + Returns: + ResNet: a :class:`ResNet` instance. + """ + # need registration of new blocks/stems? 
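+    # The stem below is shared by all branches; only the stage named by
+    # MODEL.TRIDENT.TRIDENT_STAGE is built with branch-specific trident blocks.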
+ norm = cfg.MODEL.RESNETS.NORM + stem = BasicStem( + in_channels=input_shape.channels, + out_channels=cfg.MODEL.RESNETS.STEM_OUT_CHANNELS, + norm=norm, + ) + freeze_at = cfg.MODEL.BACKBONE.FREEZE_AT + + if freeze_at >= 1: + for p in stem.parameters(): + p.requires_grad = False + stem = FrozenBatchNorm2d.convert_frozen_batchnorm(stem) + + # fmt: off + out_features = cfg.MODEL.RESNETS.OUT_FEATURES + depth = cfg.MODEL.RESNETS.DEPTH + num_groups = cfg.MODEL.RESNETS.NUM_GROUPS + width_per_group = cfg.MODEL.RESNETS.WIDTH_PER_GROUP + bottleneck_channels = num_groups * width_per_group + in_channels = cfg.MODEL.RESNETS.STEM_OUT_CHANNELS + out_channels = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS + stride_in_1x1 = cfg.MODEL.RESNETS.STRIDE_IN_1X1 + res5_dilation = cfg.MODEL.RESNETS.RES5_DILATION + deform_on_per_stage = cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE + deform_modulated = cfg.MODEL.RESNETS.DEFORM_MODULATED + deform_num_groups = cfg.MODEL.RESNETS.DEFORM_NUM_GROUPS + num_branch = cfg.MODEL.TRIDENT.NUM_BRANCH + branch_dilations = cfg.MODEL.TRIDENT.BRANCH_DILATIONS + trident_stage = cfg.MODEL.TRIDENT.TRIDENT_STAGE + test_branch_idx = cfg.MODEL.TRIDENT.TEST_BRANCH_IDX + # fmt: on + assert res5_dilation in {1, 2}, "res5_dilation cannot be {}.".format(res5_dilation) + + num_blocks_per_stage = {50: [3, 4, 6, 3], 101: [3, 4, 23, 3], 152: [3, 8, 36, 3]}[depth] + + stages = [] + + res_stage_idx = {"res2": 2, "res3": 3, "res4": 4, "res5": 5} + out_stage_idx = [res_stage_idx[f] for f in out_features] + trident_stage_idx = res_stage_idx[trident_stage] + max_stage_idx = max(out_stage_idx) + for idx, stage_idx in enumerate(range(2, max_stage_idx + 1)): + dilation = res5_dilation if stage_idx == 5 else 1 + first_stride = 1 if idx == 0 or (stage_idx == 5 and dilation == 2) else 2 + stage_kargs = { + "num_blocks": num_blocks_per_stage[idx], + "first_stride": first_stride, + "in_channels": in_channels, + "bottleneck_channels": bottleneck_channels, + "out_channels": out_channels, + "num_groups": num_groups, + "norm": norm, + "stride_in_1x1": stride_in_1x1, + "dilation": dilation, + } + if stage_idx == trident_stage_idx: + assert not deform_on_per_stage[ + idx + ], "Not support deformable conv in Trident blocks yet." 
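+            # Use the multi-branch trident block for this stage; the single
+            # per-stage "dilation" is replaced by the per-branch dilations below.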
+ stage_kargs["block_class"] = TridentBottleneckBlock + stage_kargs["num_branch"] = num_branch + stage_kargs["dilations"] = branch_dilations + stage_kargs["test_branch_idx"] = test_branch_idx + stage_kargs.pop("dilation") + elif deform_on_per_stage[idx]: + stage_kargs["block_class"] = DeformBottleneckBlock + stage_kargs["deform_modulated"] = deform_modulated + stage_kargs["deform_num_groups"] = deform_num_groups + else: + stage_kargs["block_class"] = BottleneckBlock + blocks = ( + make_trident_stage(**stage_kargs) + if stage_idx == trident_stage_idx + else make_stage(**stage_kargs) + ) + in_channels = out_channels + out_channels *= 2 + bottleneck_channels *= 2 + + if freeze_at >= stage_idx: + for block in blocks: + block.freeze() + stages.append(blocks) + return ResNet(stem, stages, out_features=out_features) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_conv.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_conv.py new file mode 100644 index 0000000000000000000000000000000000000000..7e2d5252bda5ebb2e9eee10af9c9a14fc72bb8fe --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_conv.py @@ -0,0 +1,107 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import torch +from torch import nn +from torch.nn import functional as F +from torch.nn.modules.utils import _pair + +from detectron2.layers.wrappers import _NewEmptyTensorOp + + +class TridentConv(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + paddings=0, + dilations=1, + groups=1, + num_branch=1, + test_branch_idx=-1, + bias=False, + norm=None, + activation=None, + ): + super(TridentConv, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = _pair(kernel_size) + self.num_branch = num_branch + self.stride = _pair(stride) + self.groups = groups + self.with_bias = bias + if isinstance(paddings, int): + paddings = [paddings] * self.num_branch + if isinstance(dilations, int): + dilations = [dilations] * self.num_branch + self.paddings = [_pair(padding) for padding in paddings] + self.dilations = [_pair(dilation) for dilation in dilations] + self.test_branch_idx = test_branch_idx + self.norm = norm + self.activation = activation + + assert len({self.num_branch, len(self.paddings), len(self.dilations)}) == 1 + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels // groups, *self.kernel_size) + ) + if bias: + self.bias = nn.Parameter(torch.Tensor(out_channels)) + else: + self.bias = None + + nn.init.kaiming_uniform_(self.weight, nonlinearity="relu") + if self.bias is not None: + nn.init.constant_(self.bias, 0) + + def forward(self, inputs): + num_branch = self.num_branch if self.training or self.test_branch_idx == -1 else 1 + assert len(inputs) == num_branch + + if inputs[0].numel() == 0: + output_shape = [ + (i + 2 * p - (di * (k - 1) + 1)) // s + 1 + for i, p, di, k, s in zip( + inputs[0].shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride + ) + ] + output_shape = [input[0].shape[0], self.weight.shape[0]] + output_shape + return [_NewEmptyTensorOp.apply(input, output_shape) for input in inputs] + + if self.training or self.test_branch_idx == -1: + outputs = [ + F.conv2d(input, self.weight, self.bias, self.stride, padding, dilation, self.groups) + for input, dilation, padding in zip(inputs, 
self.dilations, self.paddings) + ] + else: + outputs = [ + F.conv2d( + inputs[0], + self.weight, + self.bias, + self.stride, + self.paddings[self.test_branch_idx], + self.dilations[self.test_branch_idx], + self.groups, + ) + ] + + if self.norm is not None: + outputs = [self.norm(x) for x in outputs] + if self.activation is not None: + outputs = [self.activation(x) for x in outputs] + return outputs + + def extra_repr(self): + tmpstr = "in_channels=" + str(self.in_channels) + tmpstr += ", out_channels=" + str(self.out_channels) + tmpstr += ", kernel_size=" + str(self.kernel_size) + tmpstr += ", num_branch=" + str(self.num_branch) + tmpstr += ", test_branch_idx=" + str(self.test_branch_idx) + tmpstr += ", stride=" + str(self.stride) + tmpstr += ", paddings=" + str(self.paddings) + tmpstr += ", dilations=" + str(self.dilations) + tmpstr += ", groups=" + str(self.groups) + tmpstr += ", bias=" + str(self.with_bias) + return tmpstr diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_rcnn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..65deb90977c525f9e42ea9b2581944832a9af47e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_rcnn.py @@ -0,0 +1,116 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from detectron2.layers import batched_nms +from detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads +from detectron2.modeling.roi_heads.roi_heads import Res5ROIHeads +from detectron2.structures import Instances + + +def merge_branch_instances(instances, num_branch, nms_thresh, topk_per_image): + """ + Merge detection results from different branches of TridentNet. + Return detection results by applying non-maximum suppression (NMS) on bounding boxes + and keep the unsuppressed boxes and other instances (e.g mask) if any. + + Args: + instances (list[Instances]): A list of N * num_branch instances that store detection + results. Contain N images and each image has num_branch instances. + num_branch (int): Number of branches used for merging detection results for each image. + nms_thresh (float): The threshold to use for box non-maximum suppression. Value in [0, 1]. + topk_per_image (int): The number of top scoring detections to return. Set < 0 to return + all detections. + + Returns: + results: (list[Instances]): A list of N instances, one for each image in the batch, + that stores the topk most confidence detections after merging results from multiple + branches. + """ + if num_branch == 1: + return instances + + batch_size = len(instances) // num_branch + results = [] + for i in range(batch_size): + instance = Instances.cat([instances[i + batch_size * j] for j in range(num_branch)]) + + # Apply per-class NMS + keep = batched_nms( + instance.pred_boxes.tensor, instance.scores, instance.pred_classes, nms_thresh + ) + keep = keep[:topk_per_image] + result = instance[keep] + + results.append(result) + + return results + + +@ROI_HEADS_REGISTRY.register() +class TridentRes5ROIHeads(Res5ROIHeads): + """ + The TridentNet ROIHeads in a typical "C4" R-CNN model. + See :class:`Res5ROIHeads`. 
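+
+    At inference time, detections from the branches are merged per image with
+    NMS in `merge_branch_instances`; with TridentNet Fast (TEST_BRANCH_IDX != -1)
+    only a single branch is run, so no merging is needed.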
+ """ + + def __init__(self, cfg, input_shape): + super().__init__(cfg, input_shape) + + self.num_branch = cfg.MODEL.TRIDENT.NUM_BRANCH + self.trident_fast = cfg.MODEL.TRIDENT.TEST_BRANCH_IDX != -1 + + def forward(self, images, features, proposals, targets=None): + """ + See :class:`Res5ROIHeads.forward`. + """ + num_branch = self.num_branch if self.training or not self.trident_fast else 1 + all_targets = targets * num_branch if targets is not None else None + pred_instances, losses = super().forward(images, features, proposals, all_targets) + del images, all_targets, targets + + if self.training: + return pred_instances, losses + else: + pred_instances = merge_branch_instances( + pred_instances, + num_branch, + self.box_predictor.test_nms_thresh, + self.box_predictor.test_topk_per_image, + ) + + return pred_instances, {} + + +@ROI_HEADS_REGISTRY.register() +class TridentStandardROIHeads(StandardROIHeads): + """ + The `StandardROIHeads` for TridentNet. + See :class:`StandardROIHeads`. + """ + + def __init__(self, cfg, input_shape): + super(TridentStandardROIHeads, self).__init__(cfg, input_shape) + + self.num_branch = cfg.MODEL.TRIDENT.NUM_BRANCH + self.trident_fast = cfg.MODEL.TRIDENT.TEST_BRANCH_IDX != -1 + + def forward(self, images, features, proposals, targets=None): + """ + See :class:`Res5ROIHeads.forward`. + """ + # Use 1 branch if using trident_fast during inference. + num_branch = self.num_branch if self.training or not self.trident_fast else 1 + # Duplicate targets for all branches in TridentNet. + all_targets = targets * num_branch if targets is not None else None + pred_instances, losses = super().forward(images, features, proposals, all_targets) + del images, all_targets, targets + + if self.training: + return pred_instances, losses + else: + pred_instances = merge_branch_instances( + pred_instances, + num_branch, + self.box_predictor.test_nms_thresh, + self.box_predictor.test_topk_per_image, + ) + + return pred_instances, {} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_rpn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_rpn.py new file mode 100644 index 0000000000000000000000000000000000000000..c30137f312232ccccd86182108949fbe34b97231 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/projects/TridentNet/tridentnet/trident_rpn.py @@ -0,0 +1,32 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import torch + +from detectron2.modeling import PROPOSAL_GENERATOR_REGISTRY +from detectron2.modeling.proposal_generator.rpn import RPN +from detectron2.structures import ImageList + + +@PROPOSAL_GENERATOR_REGISTRY.register() +class TridentRPN(RPN): + """ + Trident RPN subnetwork. + """ + + def __init__(self, cfg, input_shape): + super(TridentRPN, self).__init__(cfg, input_shape) + + self.num_branch = cfg.MODEL.TRIDENT.NUM_BRANCH + self.trident_fast = cfg.MODEL.TRIDENT.TEST_BRANCH_IDX != -1 + + def forward(self, images, features, gt_instances=None): + """ + See :class:`RPN.forward`. + """ + num_branch = self.num_branch if self.training or not self.trident_fast else 1 + # Duplicate images and gt_instances for all branches in TridentNet. 
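+        # Each branch sees the same images, so the batch is repeated num_branch
+        # times along the batch dimension (a no-op when num_branch is 1).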
+ all_images = ImageList( + torch.cat([images.tensor] * num_branch), images.image_sizes * num_branch + ) + all_gt_instances = gt_instances * num_branch if gt_instances is not None else None + + return super(TridentRPN, self).forward(all_images, features, all_gt_instances) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/setup.cfg b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..b09bba99ca88d5cc900d1cc7fb0947d0443522be --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/setup.cfg @@ -0,0 +1,26 @@ +[isort] +line_length=100 +multi_line_output=3 +include_trailing_comma=True +known_standard_library=numpy,setuptools,mock +skip=./datasets,docs +skip_glob=*/__init__.py +known_myself=detectron2 +known_third_party=fvcore,matplotlib,cv2,torch,torchvision,PIL,pycocotools,yacs,termcolor,cityscapesscripts,tabulate,tqdm,scipy,lvis,psutil,pkg_resources,caffe2,onnx +no_lines_before=STDLIB,THIRDPARTY +sections=FUTURE,STDLIB,THIRDPARTY,myself,FIRSTPARTY,LOCALFOLDER +default_section=FIRSTPARTY + +[mypy] +python_version=3.6 +ignore_missing_imports = True +warn_unused_configs = True +disallow_untyped_defs = True +check_untyped_defs = True +warn_unused_ignores = True +warn_redundant_casts = True +show_column_numbers = True +follow_imports = silent +allow_redefinition = True +; Require all functions to be annotated +disallow_incomplete_defs = True diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/setup.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..a863fab1b7658a888df8623b57fe53673698cf60 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/setup.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import glob +import os +import shutil +from os import path +from setuptools import find_packages, setup +from typing import List +import torch +from torch.utils.cpp_extension import CUDA_HOME, CppExtension, CUDAExtension + +torch_ver = [int(x) for x in torch.__version__.split(".")[:2]] +assert torch_ver >= [1, 4], "Requires PyTorch >= 1.4" + + +def get_version(): + init_py_path = path.join(path.abspath(path.dirname(__file__)), "detectron2", "__init__.py") + init_py = open(init_py_path, "r").readlines() + version_line = [l.strip() for l in init_py if l.startswith("__version__")][0] + version = version_line.split("=")[-1].strip().strip("'\"") + + # The following is used to build release packages. + # Users should never use it. 
+ suffix = os.getenv("D2_VERSION_SUFFIX", "") + version = version + suffix + if os.getenv("BUILD_NIGHTLY", "0") == "1": + from datetime import datetime + + date_str = datetime.today().strftime("%y%m%d") + version = version + ".dev" + date_str + + new_init_py = [l for l in init_py if not l.startswith("__version__")] + new_init_py.append('__version__ = "{}"\n'.format(version)) + with open(init_py_path, "w") as f: + f.write("".join(new_init_py)) + return version + + +def get_extensions(): + this_dir = path.dirname(path.abspath(__file__)) + extensions_dir = path.join(this_dir, "detectron2", "layers", "csrc") + + main_source = path.join(extensions_dir, "vision.cpp") + sources = glob.glob(path.join(extensions_dir, "**", "*.cpp")) + source_cuda = glob.glob(path.join(extensions_dir, "**", "*.cu")) + glob.glob( + path.join(extensions_dir, "*.cu") + ) + + sources = [main_source] + sources + extension = CppExtension + + extra_compile_args = {"cxx": []} + define_macros = [] + + if ( + torch.cuda.is_available() and CUDA_HOME is not None and os.path.isdir(CUDA_HOME) + ) or os.getenv("FORCE_CUDA", "0") == "1": + extension = CUDAExtension + sources += source_cuda + define_macros += [("WITH_CUDA", None)] + extra_compile_args["nvcc"] = [ + "-DCUDA_HAS_FP16=1", + "-D__CUDA_NO_HALF_OPERATORS__", + "-D__CUDA_NO_HALF_CONVERSIONS__", + "-D__CUDA_NO_HALF2_OPERATORS__", + ] + + # It's better if pytorch can do this by default .. + CC = os.environ.get("CC", None) + if CC is not None: + extra_compile_args["nvcc"].append("-ccbin={}".format(CC)) + + include_dirs = [extensions_dir] + + ext_modules = [ + extension( + "detectron2._C", + sources, + include_dirs=include_dirs, + define_macros=define_macros, + extra_compile_args=extra_compile_args, + ) + ] + + return ext_modules + + +def get_model_zoo_configs() -> List[str]: + """ + Return a list of configs to include in package for model zoo. Copy over these configs inside + detectron2/model_zoo. + """ + + # Use absolute paths while symlinking. + source_configs_dir = path.join(path.dirname(path.realpath(__file__)), "configs") + destination = path.join( + path.dirname(path.realpath(__file__)), "detectron2", "model_zoo", "configs" + ) + # Symlink the config directory inside package to have a cleaner pip install. + + # Remove stale symlink/directory from a previous build. + if path.exists(source_configs_dir): + if path.islink(destination): + os.unlink(destination) + elif path.isdir(destination): + shutil.rmtree(destination) + + if not path.exists(destination): + try: + os.symlink(source_configs_dir, destination) + except OSError: + # Fall back to copying if symlink fails: ex. on Windows. 
+ shutil.copytree(source_configs_dir, destination) + + config_paths = glob.glob("configs/**/*.yaml", recursive=True) + return config_paths + + +setup( + name="detectron2", + version=get_version(), + author="FAIR", + url="https://github.com/facebookresearch/detectron2", + description="Detectron2 is FAIR's next-generation research " + "platform for object detection and segmentation.", + packages=find_packages(exclude=("configs", "tests*")), + package_data={"detectron2.model_zoo": get_model_zoo_configs()}, + python_requires=">=3.6", + install_requires=[ + "termcolor>=1.1", + "Pillow", # you can also use pillow-simd for better performance + "yacs>=0.1.6", + "tabulate", + "cloudpickle", + "matplotlib", + "mock", + "tqdm>4.29.0", + "tensorboard", + "fvcore>=0.1.1", + "future", # used by caffe2 + "pydot", # used to save caffe2 SVGs + ], + extras_require={ + "all": ["shapely", "psutil"], + "dev": [ + "flake8==3.7.9", + "isort", + "black @ git+https://github.com/psf/black@673327449f86fce558adde153bb6cbe54bfebad2", + "flake8-bugbear", + "flake8-comprehensions", + ], + }, + ext_modules=get_extensions(), + cmdclass={"build_ext": torch.utils.cpp_extension.BuildExtension}, +) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f560384045ab4f6bc2beabef1170308fca117eb3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/README.md @@ -0,0 +1,9 @@ +## Unit Tests + +To run the unittests, do: +``` +cd detectron2 +python -m unittest discover -v -s ./tests +``` + +There are also end-to-end inference & training tests, in [dev/run_*_tests.sh](../dev). diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..168f9979a4623806934b0ff1102ac166704e7dec --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_coco.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2cd807d0ae465ad2e060a373f2e75db2483771c7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_coco.py @@ -0,0 +1,77 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import json +import numpy as np +import os +import tempfile +import unittest +import pycocotools + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.data.datasets.coco import convert_to_coco_dict, load_coco_json +from detectron2.structures import BoxMode + + +def make_mask(): + """ + Makes a donut shaped binary mask. 
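+ Pixels whose distance from the center of the 100x100 image lies strictly between 10 and 20 are set to 1.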
+ """ + H = 100 + W = 100 + mask = np.zeros([H, W], dtype=np.uint8) + for x in range(W): + for y in range(H): + d = np.linalg.norm(np.array([W, H]) / 2 - np.array([x, y])) + if d > 10 and d < 20: + mask[y, x] = 1 + return mask + + +def make_dataset_dicts(mask): + """ + Returns a list of dicts that represents a single COCO data point for + object detection. The single instance given by `mask` is represented by + RLE. + """ + record = {} + record["file_name"] = "test" + record["image_id"] = 0 + record["height"] = mask.shape[0] + record["width"] = mask.shape[1] + + y, x = np.nonzero(mask) + segmentation = pycocotools.mask.encode(np.asarray(mask, order="F")) + min_x = np.min(x) + max_x = np.max(x) + min_y = np.min(y) + max_y = np.max(y) + obj = { + "bbox": [min_x, min_y, max_x, max_y], + "bbox_mode": BoxMode.XYXY_ABS, + "category_id": 0, + "iscrowd": 0, + "segmentation": segmentation, + } + record["annotations"] = [obj] + return [record] + + +class TestRLEToJson(unittest.TestCase): + def test(self): + # Make a dummy dataset. + mask = make_mask() + DatasetCatalog.register("test_dataset", lambda: make_dataset_dicts(mask)) + MetadataCatalog.get("test_dataset").set(thing_classes=["test_label"]) + + # Dump to json. + json_dict = convert_to_coco_dict("test_dataset") + with tempfile.TemporaryDirectory() as tmpdir: + json_file_name = os.path.join(tmpdir, "test.json") + with open(json_file_name, "w") as f: + json.dump(json_dict, f) + # Load from json. + dicts = load_coco_json(json_file_name, "") + + # Check the loaded mask matches the original. + anno = dicts[0]["annotations"][0] + loaded_mask = pycocotools.mask.decode(anno["segmentation"]) + self.assertTrue(np.array_equal(loaded_mask, mask)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_detection_utils.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_detection_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..bdd94dd92366418347cc74a58e807240fd795111 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_detection_utils.py @@ -0,0 +1,116 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
+ +import copy +import numpy as np +import unittest +import pycocotools.mask as mask_util + +from detectron2.data import detection_utils +from detectron2.data import transforms as T +from detectron2.structures import BitMasks, BoxMode + + +class TestTransformAnnotations(unittest.TestCase): + def test_transform_simple_annotation(self): + transforms = T.TransformList([T.HFlipTransform(400)]) + anno = { + "bbox": np.asarray([10, 10, 200, 300]), + "bbox_mode": BoxMode.XYXY_ABS, + "category_id": 3, + "segmentation": [[10, 10, 100, 100, 100, 10], [150, 150, 200, 150, 200, 200]], + } + + output = detection_utils.transform_instance_annotations(anno, transforms, (400, 400)) + self.assertTrue(np.allclose(output["bbox"], [200, 10, 390, 300])) + self.assertEqual(len(output["segmentation"]), len(anno["segmentation"])) + self.assertTrue(np.allclose(output["segmentation"][0], [390, 10, 300, 100, 300, 10])) + + detection_utils.annotations_to_instances([output, output], (400, 400)) + + def test_flip_keypoints(self): + transforms = T.TransformList([T.HFlipTransform(400)]) + anno = { + "bbox": np.asarray([10, 10, 200, 300]), + "bbox_mode": BoxMode.XYXY_ABS, + "keypoints": np.random.rand(17, 3) * 50 + 15, + } + + output = detection_utils.transform_instance_annotations( + copy.deepcopy(anno), + transforms, + (400, 400), + keypoint_hflip_indices=detection_utils.create_keypoint_hflip_indices( + ["keypoints_coco_2017_train"] + ), + ) + # The first keypoint is nose + self.assertTrue(np.allclose(output["keypoints"][0, 0], 400 - anno["keypoints"][0, 0])) + # The last 16 keypoints are 8 left-right pairs + self.assertTrue( + np.allclose( + output["keypoints"][1:, 0].reshape(-1, 2)[:, ::-1], + 400 - anno["keypoints"][1:, 0].reshape(-1, 2), + ) + ) + self.assertTrue( + np.allclose( + output["keypoints"][1:, 1:].reshape(-1, 2, 2)[:, ::-1, :], + anno["keypoints"][1:, 1:].reshape(-1, 2, 2), + ) + ) + + def test_transform_RLE(self): + transforms = T.TransformList([T.HFlipTransform(400)]) + mask = np.zeros((300, 400), order="F").astype("uint8") + mask[:, :200] = 1 + + anno = { + "bbox": np.asarray([10, 10, 200, 300]), + "bbox_mode": BoxMode.XYXY_ABS, + "segmentation": mask_util.encode(mask[:, :, None])[0], + "category_id": 3, + } + output = detection_utils.transform_instance_annotations( + copy.deepcopy(anno), transforms, (300, 400) + ) + mask = output["segmentation"] + self.assertTrue((mask[:, 200:] == 1).all()) + self.assertTrue((mask[:, :200] == 0).all()) + + inst = detection_utils.annotations_to_instances( + [output, output], (400, 400), mask_format="bitmask" + ) + self.assertTrue(isinstance(inst.gt_masks, BitMasks)) + + def test_transform_RLE_resize(self): + transforms = T.TransformList( + [T.HFlipTransform(400), T.ScaleTransform(300, 400, 400, 400, "bilinear")] + ) + mask = np.zeros((300, 400), order="F").astype("uint8") + mask[:, :200] = 1 + + anno = { + "bbox": np.asarray([10, 10, 200, 300]), + "bbox_mode": BoxMode.XYXY_ABS, + "segmentation": mask_util.encode(mask[:, :, None])[0], + "category_id": 3, + } + output = detection_utils.transform_instance_annotations( + copy.deepcopy(anno), transforms, (400, 400) + ) + + inst = detection_utils.annotations_to_instances( + [output, output], (400, 400), mask_format="bitmask" + ) + self.assertTrue(isinstance(inst.gt_masks, BitMasks)) + + def test_gen_crop(self): + instance = {"bbox": [10, 10, 100, 100], "bbox_mode": BoxMode.XYXY_ABS} + t = detection_utils.gen_crop_transform_with_instance((10, 10), (150, 150), instance) + # the box center must fall into the cropped region + 
self.assertTrue(t.x0 <= 55 <= t.x0 + t.w) + + def test_gen_crop_outside_boxes(self): + instance = {"bbox": [10, 10, 100, 100], "bbox_mode": BoxMode.XYXY_ABS} + with self.assertRaises(AssertionError): + detection_utils.gen_crop_transform_with_instance((10, 10), (15, 15), instance) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_rotation_transform.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_rotation_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..45faf7e25eb08d70e92e5f6be326083ed0d23c76 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_rotation_transform.py @@ -0,0 +1,62 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import numpy as np +import unittest + +from detectron2.data.transforms.transform import RotationTransform + + +class TestRotationTransform(unittest.TestCase): + def assertEqualsArrays(self, a1, a2): + self.assertTrue(np.allclose(a1, a2)) + + def randomData(self, h=5, w=5): + image = np.random.rand(h, w) + coords = np.array([[i, j] for j in range(h + 1) for i in range(w + 1)], dtype=float) + return image, coords, h, w + + def test180(self): + image, coords, h, w = self.randomData(6, 6) + rot = RotationTransform(h, w, 180, expand=False, center=None) + self.assertEqualsArrays(rot.apply_image(image), image[::-1, ::-1]) + rotated_coords = [[w - c[0], h - c[1]] for c in coords] + self.assertEqualsArrays(rot.apply_coords(coords), rotated_coords) + + def test45_coords(self): + _, coords, h, w = self.randomData(4, 6) + rot = RotationTransform(h, w, 45, expand=False, center=None) + rotated_coords = [ + [(x + y - (h + w) / 2) / np.sqrt(2) + w / 2, h / 2 + (y + (w - h) / 2 - x) / np.sqrt(2)] + for (x, y) in coords + ] + self.assertEqualsArrays(rot.apply_coords(coords), rotated_coords) + + def test90(self): + image, coords, h, w = self.randomData() + rot = RotationTransform(h, w, 90, expand=False, center=None) + self.assertEqualsArrays(rot.apply_image(image), image.T[::-1]) + rotated_coords = [[c[1], w - c[0]] for c in coords] + self.assertEqualsArrays(rot.apply_coords(coords), rotated_coords) + + def test90_expand(self): # non-square image + image, coords, h, w = self.randomData(h=5, w=8) + rot = RotationTransform(h, w, 90, expand=True, center=None) + self.assertEqualsArrays(rot.apply_image(image), image.T[::-1]) + rotated_coords = [[c[1], w - c[0]] for c in coords] + self.assertEqualsArrays(rot.apply_coords(coords), rotated_coords) + + def test_center_expand(self): + # center has no effect if expand=True because it only affects shifting + image, coords, h, w = self.randomData(h=5, w=8) + angle = np.random.randint(360) + rot1 = RotationTransform(h, w, angle, expand=True, center=None) + rot2 = RotationTransform(h, w, angle, expand=True, center=(0, 0)) + rot3 = RotationTransform(h, w, angle, expand=True, center=(h, w)) + rot4 = RotationTransform(h, w, angle, expand=True, center=(2, 5)) + for r1 in [rot1, rot2, rot3, rot4]: + for r2 in [rot1, rot2, rot3, rot4]: + self.assertEqualsArrays(r1.apply_image(image), r2.apply_image(image)) + self.assertEqualsArrays(r1.apply_coords(coords), r2.apply_coords(coords)) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_sampler.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_sampler.py new file mode 100644 index 
0000000000000000000000000000000000000000..1256a87a9cc3405ac20bb6b2cf1ee0b22b8f180f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_sampler.py @@ -0,0 +1,23 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +import unittest +from torch.utils.data.sampler import SequentialSampler + +from detectron2.data.samplers import GroupedBatchSampler + + +class TestGroupedBatchSampler(unittest.TestCase): + def test_missing_group_id(self): + sampler = SequentialSampler(list(range(100))) + group_ids = [1] * 100 + samples = GroupedBatchSampler(sampler, group_ids, 2) + + for mini_batch in samples: + self.assertEqual(len(mini_batch), 2) + + def test_groups(self): + sampler = SequentialSampler(list(range(100))) + group_ids = [1, 0] * 50 + samples = GroupedBatchSampler(sampler, group_ids, 2) + + for mini_batch in samples: + self.assertEqual((mini_batch[0] + mini_batch[1]) % 2, 0) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_transforms.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..6d8551887aca5d5fa773d33227cb1685f4e2a8c8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/data/test_transforms.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import logging +import numpy as np +import unittest +from unittest import mock + +from detectron2.config import get_cfg +from detectron2.data import detection_utils +from detectron2.data import transforms as T +from detectron2.utils.logger import setup_logger + +logger = logging.getLogger(__name__) + + +class TestTransforms(unittest.TestCase): + def setUp(self): + setup_logger() + + def test_apply_rotated_boxes(self): + np.random.seed(125) + cfg = get_cfg() + is_train = True + transform_gen = detection_utils.build_transform_gen(cfg, is_train) + image = np.random.rand(200, 300) + image, transforms = T.apply_transform_gens(transform_gen, image) + image_shape = image.shape[:2] # h, w + assert image_shape == (800, 1200) + annotation = {"bbox": [179, 97, 62, 40, -56]} + + boxes = np.array([annotation["bbox"]], dtype=np.float64) # boxes.shape = (1, 5) + transformed_bbox = transforms.apply_rotated_box(boxes)[0] + + expected_bbox = np.array([484, 388, 248, 160, 56], dtype=np.float64) + err_msg = "transformed_bbox = {}, expected {}".format(transformed_bbox, expected_bbox) + assert np.allclose(transformed_bbox, expected_bbox), err_msg + + def test_apply_rotated_boxes_unequal_scaling_factor(self): + np.random.seed(125) + h, w = 400, 200 + newh, neww = 800, 800 + image = np.random.rand(h, w) + transform_gen = [] + transform_gen.append(T.Resize(shape=(newh, neww))) + image, transforms = T.apply_transform_gens(transform_gen, image) + image_shape = image.shape[:2] # h, w + assert image_shape == (newh, neww) + + boxes = np.array( + [ + [150, 100, 40, 20, 0], + [150, 100, 40, 20, 30], + [150, 100, 40, 20, 90], + [150, 100, 40, 20, -90], + ], + dtype=np.float64, + ) + transformed_boxes = transforms.apply_rotated_box(boxes) + + expected_bboxes = np.array( + [ + [600, 200, 160, 40, 0], + [600, 200, 144.22205102, 52.91502622, 49.10660535], + [600, 200, 80, 80, 90], + [600, 200, 80, 80, -90], + ], + dtype=np.float64, + ) + err_msg = "transformed_boxes = {}, expected {}".format(transformed_boxes, expected_bboxes) + assert 
np.allclose(transformed_boxes, expected_bboxes), err_msg + + def test_print_transform_gen(self): + t = T.RandomCrop("relative", (100, 100)) + self.assertTrue(str(t) == "RandomCrop(crop_type='relative', crop_size=(100, 100))") + + t = T.RandomFlip(prob=0.5) + self.assertTrue(str(t) == "RandomFlip(prob=0.5)") + + t = T.RandomFlip() + self.assertTrue(str(t) == "RandomFlip()") + + def test_random_apply_prob_out_of_range_check(self): + # GIVEN + test_probabilities = {0.0: True, 0.5: True, 1.0: True, -0.01: False, 1.01: False} + + # WHEN + for given_probability, is_valid in test_probabilities.items(): + # THEN + if not is_valid: + self.assertRaises(AssertionError, T.RandomApply, None, prob=given_probability) + else: + T.RandomApply(T.NoOpTransform(), prob=given_probability) + + def test_random_apply_wrapping_transform_gen_probability_occured_evaluation(self): + # GIVEN + transform_mock = mock.MagicMock(name="MockTransform", spec=T.TransformGen) + image_mock = mock.MagicMock(name="MockImage") + random_apply = T.RandomApply(transform_mock, prob=0.001) + + # WHEN + with mock.patch.object(random_apply, "_rand_range", return_value=0.0001): + transform = random_apply.get_transform(image_mock) + + # THEN + transform_mock.get_transform.assert_called_once_with(image_mock) + self.assertIsNot(transform, transform_mock) + + def test_random_apply_wrapping_std_transform_probability_occured_evaluation(self): + # GIVEN + transform_mock = mock.MagicMock(name="MockTransform", spec=T.Transform) + image_mock = mock.MagicMock(name="MockImage") + random_apply = T.RandomApply(transform_mock, prob=0.001) + + # WHEN + with mock.patch.object(random_apply, "_rand_range", return_value=0.0001): + transform = random_apply.get_transform(image_mock) + + # THEN + self.assertIs(transform, transform_mock) + + def test_random_apply_probability_not_occured_evaluation(self): + # GIVEN + transform_mock = mock.MagicMock(name="MockTransform", spec=T.TransformGen) + image_mock = mock.MagicMock(name="MockImage") + random_apply = T.RandomApply(transform_mock, prob=0.001) + + # WHEN + with mock.patch.object(random_apply, "_rand_range", return_value=0.9): + transform = random_apply.get_transform(image_mock) + + # THEN + transform_mock.get_transform.assert_not_called() + self.assertIsInstance(transform, T.NoOpTransform) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_mask_ops.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_mask_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..d180627354b6b9d8e0776d70f78e91ee5e530210 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_mask_ops.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import contextlib +import io +import numpy as np +import unittest +from collections import defaultdict +import torch +import tqdm +from fvcore.common.benchmark import benchmark +from fvcore.common.file_io import PathManager +from pycocotools.coco import COCO +from tabulate import tabulate +from torch.nn import functional as F + +from detectron2.data import MetadataCatalog +from detectron2.layers.mask_ops import ( + pad_masks, + paste_mask_in_image_old, + paste_masks_in_image, + scale_boxes, +) +from detectron2.structures import BitMasks, Boxes, BoxMode, PolygonMasks +from detectron2.structures.masks import polygons_to_bitmask + + +def iou_between_full_image_bit_masks(a, b): + intersect = (a & b).sum() + union = (a | b).sum() + return intersect / union + + +def rasterize_polygons_with_grid_sample(full_image_bit_mask, box, mask_size, threshold=0.5): + x0, y0, x1, y1 = box[0], box[1], box[2], box[3] + + img_h, img_w = full_image_bit_mask.shape + + mask_y = np.arange(0.0, mask_size) + 0.5 # mask y sample coords in [0.5, mask_size - 0.5] + mask_x = np.arange(0.0, mask_size) + 0.5 # mask x sample coords in [0.5, mask_size - 0.5] + mask_y = mask_y / mask_size * (y1 - y0) + y0 + mask_x = mask_x / mask_size * (x1 - x0) + x0 + + mask_x = (mask_x - 0.5) / (img_w - 1) * 2 + -1 + mask_y = (mask_y - 0.5) / (img_h - 1) * 2 + -1 + gy, gx = torch.meshgrid(torch.from_numpy(mask_y), torch.from_numpy(mask_x)) + ind = torch.stack([gx, gy], dim=-1).to(dtype=torch.float32) + + full_image_bit_mask = torch.from_numpy(full_image_bit_mask) + mask = F.grid_sample( + full_image_bit_mask[None, None, :, :].to(dtype=torch.float32), + ind[None, :, :, :], + align_corners=True, + ) + + return mask[0, 0] >= threshold + + +class TestMaskCropPaste(unittest.TestCase): + def setUp(self): + json_file = MetadataCatalog.get("coco_2017_val_100").json_file + if not PathManager.isfile(json_file): + raise unittest.SkipTest("{} not found".format(json_file)) + with contextlib.redirect_stdout(io.StringIO()): + json_file = PathManager.get_local_path(json_file) + self.coco = COCO(json_file) + + def test_crop_paste_consistency(self): + """ + rasterize_polygons_within_box (used in training) + and + paste_masks_in_image (used in inference) + should be inverse operations to each other. + + This function runs several implementation of the above two operations and prints + the reconstruction error. 
+ """ + + anns = self.coco.loadAnns(self.coco.getAnnIds(iscrowd=False)) # avoid crowd annotations + + selected_anns = anns[:100] + + ious = [] + for ann in tqdm.tqdm(selected_anns): + results = self.process_annotation(ann) + ious.append([k[2] for k in results]) + + ious = np.array(ious) + mean_ious = ious.mean(axis=0) + table = [] + res_dic = defaultdict(dict) + for row, iou in zip(results, mean_ious): + table.append((row[0], row[1], iou)) + res_dic[row[0]][row[1]] = iou + print(tabulate(table, headers=["rasterize", "paste", "iou"], tablefmt="simple")) + # assert that the reconstruction is good: + self.assertTrue(res_dic["polygon"]["aligned"] > 0.94) + self.assertTrue(res_dic["roialign"]["aligned"] > 0.95) + + def process_annotation(self, ann, mask_side_len=28): + # Parse annotation data + img_info = self.coco.loadImgs(ids=[ann["image_id"]])[0] + height, width = img_info["height"], img_info["width"] + gt_polygons = [np.array(p, dtype=np.float64) for p in ann["segmentation"]] + gt_bbox = BoxMode.convert(ann["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) + gt_bit_mask = polygons_to_bitmask(gt_polygons, height, width) + + # Run rasterize .. + torch_gt_bbox = torch.tensor(gt_bbox).to(dtype=torch.float32).reshape(-1, 4) + box_bitmasks = { + "polygon": PolygonMasks([gt_polygons]).crop_and_resize(torch_gt_bbox, mask_side_len)[0], + "gridsample": rasterize_polygons_with_grid_sample(gt_bit_mask, gt_bbox, mask_side_len), + "roialign": BitMasks(torch.from_numpy(gt_bit_mask[None, :, :])).crop_and_resize( + torch_gt_bbox, mask_side_len + )[0], + } + + # Run paste .. + results = defaultdict(dict) + for k, box_bitmask in box_bitmasks.items(): + padded_bitmask, scale = pad_masks(box_bitmask[None, :, :], 1) + scaled_boxes = scale_boxes(torch_gt_bbox, scale) + + r = results[k] + r["old"] = paste_mask_in_image_old( + padded_bitmask[0], scaled_boxes[0], height, width, threshold=0.5 + ) + r["aligned"] = paste_masks_in_image( + box_bitmask[None, :, :], Boxes(torch_gt_bbox), (height, width) + )[0] + + table = [] + for rasterize_method, r in results.items(): + for paste_method, mask in r.items(): + mask = np.asarray(mask) + iou = iou_between_full_image_bit_masks(gt_bit_mask.astype("uint8"), mask) + table.append((rasterize_method, paste_method, iou)) + return table + + def test_polygon_area(self): + # Draw polygon boxes + for d in [5.0, 10.0, 1000.0]: + polygon = PolygonMasks([[[0, 0, 0, d, d, d, d, 0]]]) + area = polygon.area()[0] + target = d ** 2 + self.assertEqual(area, target) + + # Draw polygon triangles + for d in [5.0, 10.0, 1000.0]: + polygon = PolygonMasks([[[0, 0, 0, d, d, d]]]) + area = polygon.area()[0] + target = d ** 2 / 2 + self.assertEqual(area, target) + + +def benchmark_paste(): + S = 800 + H, W = image_shape = (S, S) + N = 64 + torch.manual_seed(42) + masks = torch.rand(N, 28, 28) + + center = torch.rand(N, 2) * 600 + 100 + wh = torch.clamp(torch.randn(N, 2) * 40 + 200, min=50) + x0y0 = torch.clamp(center - wh * 0.5, min=0.0) + x1y1 = torch.clamp(center + wh * 0.5, max=S) + boxes = Boxes(torch.cat([x0y0, x1y1], axis=1)) + + def func(device, n=3): + m = masks.to(device=device) + b = boxes.to(device=device) + + def bench(): + for _ in range(n): + paste_masks_in_image(m, b, image_shape) + if device.type == "cuda": + torch.cuda.synchronize() + + return bench + + specs = [{"device": torch.device("cpu"), "n": 3}] + if torch.cuda.is_available(): + specs.append({"device": torch.device("cuda"), "n": 3}) + + benchmark(func, "paste_masks", specs, num_iters=10, warmup_iters=2) + + +if __name__ == "__main__": 
+ benchmark_paste() + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_nms_rotated.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_nms_rotated.py new file mode 100644 index 0000000000000000000000000000000000000000..94b346c524d2c372273dfe992df045962b9605cd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_nms_rotated.py @@ -0,0 +1,188 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +from __future__ import absolute_import, division, print_function, unicode_literals +import numpy as np +import unittest +import torch +from torchvision import ops + +from detectron2.layers import batched_nms, batched_nms_rotated, nms_rotated + + +def nms_edit_distance(keep1, keep2): + """ + Compare the "keep" result of two nms call. + They are allowed to be different in terms of edit distance + due to floating point precision issues, e.g., + if a box happen to have an IoU of 0.5 with another box, + one implentation may choose to keep it while another may discard it. + """ + if torch.equal(keep1, keep2): + # they should be equal most of the time + return 0 + keep1, keep2 = tuple(keep1.cpu()), tuple(keep2.cpu()) + m, n = len(keep1), len(keep2) + + # edit distance with DP + f = [np.arange(n + 1), np.arange(n + 1)] + for i in range(m): + cur_row = i % 2 + other_row = (i + 1) % 2 + f[other_row][0] = i + 1 + for j in range(n): + f[other_row][j + 1] = ( + f[cur_row][j] + if keep1[i] == keep2[j] + else min(min(f[cur_row][j], f[cur_row][j + 1]), f[other_row][j]) + 1 + ) + return f[m % 2][n] + + +class TestNMSRotated(unittest.TestCase): + def reference_horizontal_nms(self, boxes, scores, iou_threshold): + """ + Args: + box_scores (N, 5): boxes in corner-form and probabilities. + (Note here 5 == 4 + 1, i.e., 4-dim horizontal box + 1-dim prob) + iou_threshold: intersection over union threshold. 
+ Returns: + picked: a list of indexes of the kept boxes + """ + picked = [] + _, indexes = scores.sort(descending=True) + while len(indexes) > 0: + current = indexes[0] + picked.append(current.item()) + if len(indexes) == 1: + break + current_box = boxes[current, :] + indexes = indexes[1:] + rest_boxes = boxes[indexes, :] + iou = ops.box_iou(rest_boxes, current_box.unsqueeze(0)).squeeze(1) + indexes = indexes[iou <= iou_threshold] + + return torch.as_tensor(picked) + + def _create_tensors(self, N): + boxes = torch.rand(N, 4) * 100 + # Note: the implementation of this function in torchvision is: + # boxes[:, 2:] += torch.rand(N, 2) * 100 + # but it does not guarantee non-negative widths/heights constraints: + # boxes[:, 2] >= boxes[:, 0] and boxes[:, 3] >= boxes[:, 1]: + boxes[:, 2:] += boxes[:, :2] + scores = torch.rand(N) + return boxes, scores + + def test_batched_nms_rotated_0_degree_cpu(self): + N = 2000 + num_classes = 50 + boxes, scores = self._create_tensors(N) + idxs = torch.randint(0, num_classes, (N,)) + rotated_boxes = torch.zeros(N, 5) + rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0 + rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0 + rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0] + rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1] + err_msg = "Rotated NMS with 0 degree is incompatible with horizontal NMS for IoU={}" + for iou in [0.2, 0.5, 0.8]: + backup = boxes.clone() + keep_ref = batched_nms(boxes, scores, idxs, iou) + assert torch.allclose(boxes, backup), "boxes modified by batched_nms" + backup = rotated_boxes.clone() + keep = batched_nms_rotated(rotated_boxes, scores, idxs, iou) + assert torch.allclose( + rotated_boxes, backup + ), "rotated_boxes modified by batched_nms_rotated" + self.assertLessEqual(nms_edit_distance(keep, keep_ref), 1, err_msg.format(iou)) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_batched_nms_rotated_0_degree_cuda(self): + N = 2000 + num_classes = 50 + boxes, scores = self._create_tensors(N) + idxs = torch.randint(0, num_classes, (N,)) + rotated_boxes = torch.zeros(N, 5) + rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0 + rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0 + rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0] + rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1] + err_msg = "Rotated NMS with 0 degree is incompatible with horizontal NMS for IoU={}" + for iou in [0.2, 0.5, 0.8]: + backup = boxes.clone() + keep_ref = batched_nms(boxes.cuda(), scores.cuda(), idxs, iou) + self.assertTrue(torch.allclose(boxes, backup), "boxes modified by batched_nms") + backup = rotated_boxes.clone() + keep = batched_nms_rotated(rotated_boxes.cuda(), scores.cuda(), idxs, iou) + self.assertTrue( + torch.allclose(rotated_boxes, backup), + "rotated_boxes modified by batched_nms_rotated", + ) + self.assertLessEqual(nms_edit_distance(keep, keep_ref), 1, err_msg.format(iou)) + + def test_nms_rotated_0_degree_cpu(self): + N = 1000 + boxes, scores = self._create_tensors(N) + rotated_boxes = torch.zeros(N, 5) + rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0 + rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0 + rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0] + rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1] + err_msg = "Rotated NMS incompatible between CPU and reference implementation for IoU={}" + for iou in [0.5]: + keep_ref = self.reference_horizontal_nms(boxes, scores, iou) + keep = nms_rotated(rotated_boxes, scores, iou) + self.assertLessEqual(nms_edit_distance(keep, keep_ref), 1, 
err_msg.format(iou)) + + def test_nms_rotated_90_degrees_cpu(self): + N = 1000 + boxes, scores = self._create_tensors(N) + rotated_boxes = torch.zeros(N, 5) + rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0 + rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0 + # Note for rotated_boxes[:, 2] and rotated_boxes[:, 3]: + # widths and heights are intentionally swapped here for 90 degrees case + # so that the reference horizontal nms could be used + rotated_boxes[:, 2] = boxes[:, 3] - boxes[:, 1] + rotated_boxes[:, 3] = boxes[:, 2] - boxes[:, 0] + + rotated_boxes[:, 4] = torch.ones(N) * 90 + err_msg = "Rotated NMS incompatible between CPU and reference implementation for IoU={}" + for iou in [0.2, 0.5, 0.8]: + keep_ref = self.reference_horizontal_nms(boxes, scores, iou) + keep = nms_rotated(rotated_boxes, scores, iou) + assert torch.equal(keep, keep_ref), err_msg.format(iou) + + def test_nms_rotated_180_degrees_cpu(self): + N = 1000 + boxes, scores = self._create_tensors(N) + rotated_boxes = torch.zeros(N, 5) + rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0 + rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0 + rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0] + rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1] + rotated_boxes[:, 4] = torch.ones(N) * 180 + err_msg = "Rotated NMS incompatible between CPU and reference implementation for IoU={}" + for iou in [0.2, 0.5, 0.8]: + keep_ref = self.reference_horizontal_nms(boxes, scores, iou) + keep = nms_rotated(rotated_boxes, scores, iou) + assert torch.equal(keep, keep_ref), err_msg.format(iou) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_nms_rotated_0_degree_cuda(self): + N = 1000 + boxes, scores = self._create_tensors(N) + rotated_boxes = torch.zeros(N, 5) + rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0 + rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0 + rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0] + rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1] + err_msg = "Rotated NMS incompatible between CPU and CUDA for IoU={}" + + for iou in [0.2, 0.5, 0.8]: + r_cpu = nms_rotated(rotated_boxes, scores, iou) + r_cuda = nms_rotated(rotated_boxes.cuda(), scores.cuda(), iou) + + assert torch.equal(r_cpu, r_cuda.cpu()), err_msg.format(iou) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_roi_align.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_roi_align.py new file mode 100644 index 0000000000000000000000000000000000000000..633d7c29c41b94b8a57c15aff728f23a71b535d1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_roi_align.py @@ -0,0 +1,152 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import numpy as np +import unittest +import cv2 +import torch +from fvcore.common.benchmark import benchmark + +from detectron2.layers.roi_align import ROIAlign + + +class ROIAlignTest(unittest.TestCase): + def test_forward_output(self): + input = np.arange(25).reshape(5, 5).astype("float32") + """ + 0 1 2 3 4 + 5 6 7 8 9 + 10 11 12 13 14 + 15 16 17 18 19 + 20 21 22 23 24 + """ + + output = self._simple_roialign(input, [1, 1, 3, 3], (4, 4), aligned=False) + output_correct = self._simple_roialign(input, [1, 1, 3, 3], (4, 4), aligned=True) + + # without correction: + old_results = [ + [7.5, 8, 8.5, 9], + [10, 10.5, 11, 11.5], + [12.5, 13, 13.5, 14], + [15, 15.5, 16, 16.5], + ] + + # with 0.5 correction: + correct_results = [ + [4.5, 5.0, 5.5, 6.0], + [7.0, 7.5, 8.0, 8.5], + [9.5, 10.0, 10.5, 11.0], + [12.0, 12.5, 13.0, 13.5], + ] + # This is an upsampled version of [[6, 7], [11, 12]] + + self.assertTrue(np.allclose(output.flatten(), np.asarray(old_results).flatten())) + self.assertTrue( + np.allclose(output_correct.flatten(), np.asarray(correct_results).flatten()) + ) + + # Also see similar issues in tensorflow at + # https://github.com/tensorflow/tensorflow/issues/26278 + + def test_resize(self): + H, W = 30, 30 + input = np.random.rand(H, W).astype("float32") * 100 + box = [10, 10, 20, 20] + output = self._simple_roialign(input, box, (5, 5), aligned=True) + + input2x = cv2.resize(input, (W // 2, H // 2), interpolation=cv2.INTER_LINEAR) + box2x = [x / 2 for x in box] + output2x = self._simple_roialign(input2x, box2x, (5, 5), aligned=True) + diff = np.abs(output2x - output) + self.assertTrue(diff.max() < 1e-4) + + def _simple_roialign(self, img, box, resolution, aligned=True): + """ + RoiAlign with scale 1.0 and 0 sample ratio. + """ + if isinstance(resolution, int): + resolution = (resolution, resolution) + op = ROIAlign(resolution, 1.0, 0, aligned=aligned) + input = torch.from_numpy(img[None, None, :, :].astype("float32")) + + rois = [0] + list(box) + rois = torch.from_numpy(np.asarray(rois)[None, :].astype("float32")) + output = op.forward(input, rois) + if torch.cuda.is_available(): + output_cuda = op.forward(input.cuda(), rois.cuda()).cpu() + self.assertTrue(torch.allclose(output, output_cuda)) + return output[0, 0] + + def _simple_roialign_with_grad(self, img, box, resolution, device): + if isinstance(resolution, int): + resolution = (resolution, resolution) + + op = ROIAlign(resolution, 1.0, 0, aligned=True) + input = torch.from_numpy(img[None, None, :, :].astype("float32")) + + rois = [0] + list(box) + rois = torch.from_numpy(np.asarray(rois)[None, :].astype("float32")) + input = input.to(device=device) + rois = rois.to(device=device) + input.requires_grad = True + output = op.forward(input, rois) + return input, output + + def test_empty_box(self): + img = np.random.rand(5, 5) + box = [3, 4, 5, 4] + o = self._simple_roialign(img, box, 7) + self.assertTrue(o.shape == (7, 7)) + self.assertTrue((o == 0).all()) + + for dev in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + input, output = self._simple_roialign_with_grad(img, box, 7, torch.device(dev)) + output.sum().backward() + self.assertTrue(torch.allclose(input.grad, torch.zeros_like(input))) + + def test_empty_batch(self): + input = torch.zeros(0, 3, 10, 10, dtype=torch.float32) + rois = torch.zeros(0, 5, dtype=torch.float32) + op = ROIAlign((7, 7), 1.0, 0, aligned=True) + output = op.forward(input, rois) + self.assertTrue(output.shape == (0, 3, 7, 7)) + + +def benchmark_roi_align(): + from 
detectron2 import _C + + def random_boxes(mean_box, stdev, N, maxsize): + ret = torch.rand(N, 4) * stdev + torch.tensor(mean_box, dtype=torch.float) + ret.clamp_(min=0, max=maxsize) + return ret + + def func(N, C, H, W, nboxes_per_img): + input = torch.rand(N, C, H, W) + boxes = [] + batch_idx = [] + for k in range(N): + b = random_boxes([80, 80, 130, 130], 24, nboxes_per_img, H) + # try smaller boxes: + # b = random_boxes([100, 100, 110, 110], 4, nboxes_per_img, H) + boxes.append(b) + batch_idx.append(torch.zeros(nboxes_per_img, 1, dtype=torch.float32) + k) + boxes = torch.cat(boxes, axis=0) + batch_idx = torch.cat(batch_idx, axis=0) + boxes = torch.cat([batch_idx, boxes], axis=1) + + input = input.cuda() + boxes = boxes.cuda() + + def bench(): + _C.roi_align_forward(input, boxes, 1.0, 7, 7, 0, True) + torch.cuda.synchronize() + + return bench + + args = [dict(N=2, C=512, H=256, W=256, nboxes_per_img=500)] + benchmark(func, "cuda_roialign", args, num_iters=20, warmup_iters=1) + + +if __name__ == "__main__": + if torch.cuda.is_available(): + benchmark_roi_align() + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_roi_align_rotated.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_roi_align_rotated.py new file mode 100644 index 0000000000000000000000000000000000000000..1915b59ff6774a54ee0e5dbfdbe0ecf89f2e2235 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/layers/test_roi_align_rotated.py @@ -0,0 +1,176 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import unittest +import cv2 +import torch +from torch.autograd import Variable, gradcheck + +from detectron2.layers.roi_align import ROIAlign +from detectron2.layers.roi_align_rotated import ROIAlignRotated + +logger = logging.getLogger(__name__) + + +class ROIAlignRotatedTest(unittest.TestCase): + def _box_to_rotated_box(self, box, angle): + return [ + (box[0] + box[2]) / 2.0, + (box[1] + box[3]) / 2.0, + box[2] - box[0], + box[3] - box[1], + angle, + ] + + def _rot90(self, img, num): + num = num % 4 # note: -1 % 4 == 3 + for _ in range(num): + img = img.transpose(0, 1).flip(0) + return img + + def test_forward_output_0_90_180_270(self): + for i in range(4): + # i = 0, 1, 2, 3 corresponding to 0, 90, 180, 270 degrees + img = torch.arange(25, dtype=torch.float32).reshape(5, 5) + """ + 0 1 2 3 4 + 5 6 7 8 9 + 10 11 12 13 14 + 15 16 17 18 19 + 20 21 22 23 24 + """ + box = [1, 1, 3, 3] + rotated_box = self._box_to_rotated_box(box=box, angle=90 * i) + + result = self._simple_roi_align_rotated(img=img, box=rotated_box, resolution=(4, 4)) + + # Here's an explanation for 0 degree case: + # point 0 in the original input lies at [0.5, 0.5] + # (the center of bin [0, 1] x [0, 1]) + # point 1 in the original input lies at [1.5, 0.5], etc. 
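+ # (in general, the pixel in row r, column c is treated as a point at [c + 0.5, r + 0.5])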
+ # since the resolution is (4, 4) that divides [1, 3] x [1, 3] + # into 4 x 4 equal bins, + # the top-left bin is [1, 1.5] x [1, 1.5], and its center + # (1.25, 1.25) lies at the 3/4 position + # between point 0 and point 1, point 5 and point 6, + # point 0 and point 5, point 1 and point 6, so it can be calculated as + # 0.25*(0*0.25+1*0.75)+(5*0.25+6*0.75)*0.75 = 4.5 + result_expected = torch.tensor( + [ + [4.5, 5.0, 5.5, 6.0], + [7.0, 7.5, 8.0, 8.5], + [9.5, 10.0, 10.5, 11.0], + [12.0, 12.5, 13.0, 13.5], + ] + ) + # This is also an upsampled version of [[6, 7], [11, 12]] + + # When the box is rotated by 90 degrees CCW, + # the result would be rotated by 90 degrees CW, thus it's -i here + result_expected = self._rot90(result_expected, -i) + + assert torch.allclose(result, result_expected) + + def test_resize(self): + H, W = 30, 30 + input = torch.rand(H, W) * 100 + box = [10, 10, 20, 20] + rotated_box = self._box_to_rotated_box(box, angle=0) + output = self._simple_roi_align_rotated(img=input, box=rotated_box, resolution=(5, 5)) + + input2x = cv2.resize(input.numpy(), (W // 2, H // 2), interpolation=cv2.INTER_LINEAR) + input2x = torch.from_numpy(input2x) + box2x = [x / 2 for x in box] + rotated_box2x = self._box_to_rotated_box(box2x, angle=0) + output2x = self._simple_roi_align_rotated(img=input2x, box=rotated_box2x, resolution=(5, 5)) + assert torch.allclose(output2x, output) + + def _simple_roi_align_rotated(self, img, box, resolution): + """ + RoiAlignRotated with scale 1.0 and 0 sample ratio. + """ + op = ROIAlignRotated(output_size=resolution, spatial_scale=1.0, sampling_ratio=0) + input = img[None, None, :, :] + + rois = [0] + list(box) + rois = torch.tensor(rois, dtype=torch.float32)[None, :] + result_cpu = op.forward(input, rois) + if torch.cuda.is_available(): + result_cuda = op.forward(input.cuda(), rois.cuda()) + assert torch.allclose(result_cpu, result_cuda.cpu()) + return result_cpu[0, 0] + + def test_empty_box(self): + img = torch.rand(5, 5) + out = self._simple_roi_align_rotated(img, [2, 3, 0, 0, 0], (7, 7)) + self.assertTrue((out == 0).all()) + + def test_roi_align_rotated_gradcheck_cpu(self): + dtype = torch.float64 + device = torch.device("cpu") + roi_align_rotated_op = ROIAlignRotated( + output_size=(5, 5), spatial_scale=0.5, sampling_ratio=1 + ).to(dtype=dtype, device=device) + x = torch.rand(1, 1, 10, 10, dtype=dtype, device=device, requires_grad=True) + # roi format is (batch index, x_center, y_center, width, height, angle) + rois = torch.tensor( + [[0, 4.5, 4.5, 9, 9, 0], [0, 2, 7, 4, 4, 0], [0, 7, 7, 4, 4, 0]], + dtype=dtype, + device=device, + ) + + def func(input): + return roi_align_rotated_op(input, rois) + + assert gradcheck(func, (x,)), "gradcheck failed for RoIAlignRotated CPU" + assert gradcheck(func, (x.transpose(2, 3),)), "gradcheck failed for RoIAlignRotated CPU" + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_roi_align_rotated_gradient_cuda(self): + """ + Compute gradients for ROIAlignRotated with multiple bounding boxes on the GPU, + and compare the result with ROIAlign + """ + # torch.manual_seed(123) + dtype = torch.float64 + device = torch.device("cuda") + pool_h, pool_w = (5, 5) + + roi_align = ROIAlign(output_size=(pool_h, pool_w), spatial_scale=1, sampling_ratio=2).to( + device=device + ) + + roi_align_rotated = ROIAlignRotated( + output_size=(pool_h, pool_w), spatial_scale=1, sampling_ratio=2 + ).to(device=device) + + x = torch.rand(1, 1, 10, 10, dtype=dtype, device=device, requires_grad=True) + # x_rotated 
= x.clone() won't work (will lead to grad_fun=CloneBackward)! + x_rotated = Variable(x.data.clone(), requires_grad=True) + + # roi_rotated format is (batch index, x_center, y_center, width, height, angle) + rois_rotated = torch.tensor( + [[0, 4.5, 4.5, 9, 9, 0], [0, 2, 7, 4, 4, 0], [0, 7, 7, 4, 4, 0]], + dtype=dtype, + device=device, + ) + + y_rotated = roi_align_rotated(x_rotated, rois_rotated) + s_rotated = y_rotated.sum() + s_rotated.backward() + + # roi format is (batch index, x1, y1, x2, y2) + rois = torch.tensor( + [[0, 0, 0, 9, 9], [0, 0, 5, 4, 9], [0, 5, 5, 9, 9]], dtype=dtype, device=device + ) + + y = roi_align(x, rois) + s = y.sum() + s.backward() + + assert torch.allclose( + x.grad, x_rotated.grad + ), "gradients for ROIAlign and ROIAlignRotated mismatch on CUDA" + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_anchor_generator.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_anchor_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..bc14f0279ee682040082e51f96a41a267269d6ce --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_anchor_generator.py @@ -0,0 +1,121 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import unittest +import torch + +from detectron2.config import get_cfg +from detectron2.layers import ShapeSpec +from detectron2.modeling.anchor_generator import DefaultAnchorGenerator, RotatedAnchorGenerator + +logger = logging.getLogger(__name__) + + +class TestAnchorGenerator(unittest.TestCase): + def test_default_anchor_generator(self): + cfg = get_cfg() + cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64]] + cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.25, 1, 4]] + + anchor_generator = DefaultAnchorGenerator(cfg, [ShapeSpec(stride=4)]) + + # only the last two dimensions of features matter here + num_images = 2 + features = {"stage3": torch.rand(num_images, 96, 1, 2)} + anchors = anchor_generator([features["stage3"]]) + expected_anchor_tensor = torch.tensor( + [ + [-32.0, -8.0, 32.0, 8.0], + [-16.0, -16.0, 16.0, 16.0], + [-8.0, -32.0, 8.0, 32.0], + [-64.0, -16.0, 64.0, 16.0], + [-32.0, -32.0, 32.0, 32.0], + [-16.0, -64.0, 16.0, 64.0], + [-28.0, -8.0, 36.0, 8.0], # -28.0 == -32.0 + STRIDE (4) + [-12.0, -16.0, 20.0, 16.0], + [-4.0, -32.0, 12.0, 32.0], + [-60.0, -16.0, 68.0, 16.0], + [-28.0, -32.0, 36.0, 32.0], + [-12.0, -64.0, 20.0, 64.0], + ] + ) + + assert torch.allclose(anchors[0].tensor, expected_anchor_tensor) + + def test_default_anchor_generator_centered(self): + # test explicit args + anchor_generator = DefaultAnchorGenerator( + sizes=[32, 64], aspect_ratios=[0.25, 1, 4], strides=[4] + ) + + # only the last two dimensions of features matter here + num_images = 2 + features = {"stage3": torch.rand(num_images, 96, 1, 2)} + expected_anchor_tensor = torch.tensor( + [ + [-30.0, -6.0, 34.0, 10.0], + [-14.0, -14.0, 18.0, 18.0], + [-6.0, -30.0, 10.0, 34.0], + [-62.0, -14.0, 66.0, 18.0], + [-30.0, -30.0, 34.0, 34.0], + [-14.0, -62.0, 18.0, 66.0], + [-26.0, -6.0, 38.0, 10.0], + [-10.0, -14.0, 22.0, 18.0], + [-2.0, 
-30.0, 14.0, 34.0], + [-58.0, -14.0, 70.0, 18.0], + [-26.0, -30.0, 38.0, 34.0], + [-10.0, -62.0, 22.0, 66.0], + ] + ) + + anchors = anchor_generator([features["stage3"]]) + assert torch.allclose(anchors[0].tensor, expected_anchor_tensor) + + # doesn't work yet + # anchors = torch.jit.script(anchor_generator)([features["stage3"]]) + # assert torch.allclose(anchors[0].tensor, expected_anchor_tensor) + + def test_rrpn_anchor_generator(self): + cfg = get_cfg() + cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64]] + cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.25, 1, 4]] + cfg.MODEL.ANCHOR_GENERATOR.ANGLES = [0, 45] # test single list[float] + anchor_generator = RotatedAnchorGenerator(cfg, [ShapeSpec(stride=4)]) + + # only the last two dimensions of features matter here + num_images = 2 + features = {"stage3": torch.rand(num_images, 96, 1, 2)} + anchors = anchor_generator([features["stage3"]]) + expected_anchor_tensor = torch.tensor( + [ + [0.0, 0.0, 64.0, 16.0, 0.0], + [0.0, 0.0, 64.0, 16.0, 45.0], + [0.0, 0.0, 32.0, 32.0, 0.0], + [0.0, 0.0, 32.0, 32.0, 45.0], + [0.0, 0.0, 16.0, 64.0, 0.0], + [0.0, 0.0, 16.0, 64.0, 45.0], + [0.0, 0.0, 128.0, 32.0, 0.0], + [0.0, 0.0, 128.0, 32.0, 45.0], + [0.0, 0.0, 64.0, 64.0, 0.0], + [0.0, 0.0, 64.0, 64.0, 45.0], + [0.0, 0.0, 32.0, 128.0, 0.0], + [0.0, 0.0, 32.0, 128.0, 45.0], + [4.0, 0.0, 64.0, 16.0, 0.0], # 4.0 == 0.0 + STRIDE (4) + [4.0, 0.0, 64.0, 16.0, 45.0], + [4.0, 0.0, 32.0, 32.0, 0.0], + [4.0, 0.0, 32.0, 32.0, 45.0], + [4.0, 0.0, 16.0, 64.0, 0.0], + [4.0, 0.0, 16.0, 64.0, 45.0], + [4.0, 0.0, 128.0, 32.0, 0.0], + [4.0, 0.0, 128.0, 32.0, 45.0], + [4.0, 0.0, 64.0, 64.0, 0.0], + [4.0, 0.0, 64.0, 64.0, 45.0], + [4.0, 0.0, 32.0, 128.0, 0.0], + [4.0, 0.0, 32.0, 128.0, 45.0], + ] + ) + + assert torch.allclose(anchors[0].tensor, expected_anchor_tensor) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_box2box_transform.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_box2box_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..9d124d79fc0e17f268f6b5b50fcb8f8dfad59368 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_box2box_transform.py @@ -0,0 +1,64 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import unittest +import torch + +from detectron2.modeling.box_regression import Box2BoxTransform, Box2BoxTransformRotated + +logger = logging.getLogger(__name__) + + +def random_boxes(mean_box, stdev, N): + return torch.rand(N, 4) * stdev + torch.tensor(mean_box, dtype=torch.float) + + +class TestBox2BoxTransform(unittest.TestCase): + def test_reconstruction(self): + weights = (5, 5, 10, 10) + b2b_tfm = Box2BoxTransform(weights=weights) + src_boxes = random_boxes([10, 10, 20, 20], 1, 10) + dst_boxes = random_boxes([10, 10, 20, 20], 1, 10) + + devices = [torch.device("cpu")] + if torch.cuda.is_available(): + devices.append(torch.device("cuda")) + for device in devices: + src_boxes = src_boxes.to(device=device) + dst_boxes = dst_boxes.to(device=device) + deltas = b2b_tfm.get_deltas(src_boxes, dst_boxes) + dst_boxes_reconstructed = b2b_tfm.apply_deltas(deltas, src_boxes) + assert torch.allclose(dst_boxes, dst_boxes_reconstructed) + + +def random_rotated_boxes(mean_box, std_length, std_angle, N): + return torch.cat( + [torch.rand(N, 4) * std_length, torch.rand(N, 1) * std_angle], dim=1 + ) + torch.tensor(mean_box, dtype=torch.float) + + +class TestBox2BoxTransformRotated(unittest.TestCase): + def test_reconstruction(self): + weights = (5, 5, 10, 10, 1) + b2b_transform = Box2BoxTransformRotated(weights=weights) + src_boxes = random_rotated_boxes([10, 10, 20, 20, -30], 5, 60.0, 10) + dst_boxes = random_rotated_boxes([10, 10, 20, 20, -30], 5, 60.0, 10) + + devices = [torch.device("cpu")] + if torch.cuda.is_available(): + devices.append(torch.device("cuda")) + for device in devices: + src_boxes = src_boxes.to(device=device) + dst_boxes = dst_boxes.to(device=device) + deltas = b2b_transform.get_deltas(src_boxes, dst_boxes) + dst_boxes_reconstructed = b2b_transform.apply_deltas(deltas, src_boxes) + assert torch.allclose(dst_boxes[:, :4], dst_boxes_reconstructed[:, :4], atol=1e-5) + # angle difference has to be normalized + assert torch.allclose( + (dst_boxes[:, 4] - dst_boxes_reconstructed[:, 4] + 180.0) % 360.0 - 180.0, + torch.zeros_like(dst_boxes[:, 4]), + atol=1e-4, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_fast_rcnn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_fast_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..70b64d3db497bac52e127d02a543b14d2e37e8eb --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_fast_rcnn.py @@ -0,0 +1,106 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import unittest +import torch + +from detectron2.layers import ShapeSpec +from detectron2.modeling.box_regression import Box2BoxTransform, Box2BoxTransformRotated +from detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputLayers +from detectron2.modeling.roi_heads.rotated_fast_rcnn import RotatedFastRCNNOutputLayers +from detectron2.structures import Boxes, Instances, RotatedBoxes +from detectron2.utils.events import EventStorage + +logger = logging.getLogger(__name__) + + +class FastRCNNTest(unittest.TestCase): + def test_fast_rcnn(self): + torch.manual_seed(132) + + box_head_output_size = 8 + + box_predictor = FastRCNNOutputLayers( + ShapeSpec(channels=box_head_output_size), + box2box_transform=Box2BoxTransform(weights=(10, 10, 5, 5)), + num_classes=5, + ) + feature_pooled = torch.rand(2, box_head_output_size) + predictions = box_predictor(feature_pooled) + + proposal_boxes = torch.tensor([[0.8, 1.1, 3.2, 2.8], [2.3, 2.5, 7, 8]], dtype=torch.float32) + gt_boxes = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32) + proposal = Instances((10, 10)) + proposal.proposal_boxes = Boxes(proposal_boxes) + proposal.gt_boxes = Boxes(gt_boxes) + proposal.gt_classes = torch.tensor([1, 2]) + + with EventStorage(): # capture events in a new storage to discard them + losses = box_predictor.losses(predictions, [proposal]) + + expected_losses = { + "loss_cls": torch.tensor(1.7951188087), + "loss_box_reg": torch.tensor(4.0357131958), + } + for name in expected_losses.keys(): + assert torch.allclose(losses[name], expected_losses[name]) + + def test_fast_rcnn_empty_batch(self, device="cpu"): + box_predictor = FastRCNNOutputLayers( + ShapeSpec(channels=10), + box2box_transform=Box2BoxTransform(weights=(10, 10, 5, 5)), + num_classes=8, + ).to(device=device) + + logits = torch.randn(0, 100, requires_grad=True, device=device) + deltas = torch.randn(0, 4, requires_grad=True, device=device) + losses = box_predictor.losses([logits, deltas], []) + for value in losses.values(): + self.assertTrue(torch.allclose(value, torch.zeros_like(value))) + sum(losses.values()).backward() + self.assertTrue(logits.grad is not None) + self.assertTrue(deltas.grad is not None) + + predictions, _ = box_predictor.inference([logits, deltas], []) + self.assertEqual(len(predictions), 0) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_fast_rcnn_empty_batch_cuda(self): + self.test_fast_rcnn_empty_batch(device=torch.device("cuda")) + + def test_fast_rcnn_rotated(self): + torch.manual_seed(132) + box_head_output_size = 8 + + box_predictor = RotatedFastRCNNOutputLayers( + ShapeSpec(channels=box_head_output_size), + box2box_transform=Box2BoxTransformRotated(weights=(10, 10, 5, 5, 1)), + num_classes=5, + ) + feature_pooled = torch.rand(2, box_head_output_size) + predictions = box_predictor(feature_pooled) + proposal_boxes = torch.tensor( + [[2, 1.95, 2.4, 1.7, 0], [4.65, 5.25, 4.7, 5.5, 0]], dtype=torch.float32 + ) + gt_boxes = torch.tensor([[2, 2, 2, 2, 0], [4, 4, 4, 4, 0]], dtype=torch.float32) + proposal = Instances((10, 10)) + proposal.proposal_boxes = RotatedBoxes(proposal_boxes) + proposal.gt_boxes = RotatedBoxes(gt_boxes) + proposal.gt_classes = torch.tensor([1, 2]) + + with EventStorage(): # capture events in a new storage to discard them + losses = box_predictor.losses(predictions, [proposal]) + + # Note: the expected losses are slightly different even if + # the boxes are essentially the same as in the FastRCNNOutput test, because + # 
bbox_pred in FastRCNNOutputLayers have different Linear layers/initialization + # between the two cases. + expected_losses = { + "loss_cls": torch.tensor(1.7920907736), + "loss_box_reg": torch.tensor(4.0410838127), + } + for name in expected_losses.keys(): + assert torch.allclose(losses[name], expected_losses[name]) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_model_e2e.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_model_e2e.py new file mode 100644 index 0000000000000000000000000000000000000000..95fe6a09fd15f877544392ddeccd9906025b0fdd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_model_e2e.py @@ -0,0 +1,154 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + + +import unittest +import torch + +import detectron2.model_zoo as model_zoo +from detectron2.config import get_cfg +from detectron2.modeling import build_model +from detectron2.structures import BitMasks, Boxes, ImageList, Instances +from detectron2.utils.events import EventStorage + + +def get_model_zoo(config_path): + """ + Like model_zoo.get, but do not load any weights (even pretrained) + """ + cfg_file = model_zoo.get_config_file(config_path) + cfg = get_cfg() + cfg.merge_from_file(cfg_file) + if not torch.cuda.is_available(): + cfg.MODEL.DEVICE = "cpu" + return build_model(cfg) + + +def create_model_input(img, inst=None): + if inst is not None: + return {"image": img, "instances": inst} + else: + return {"image": img} + + +def get_empty_instance(h, w): + inst = Instances((h, w)) + inst.gt_boxes = Boxes(torch.rand(0, 4)) + inst.gt_classes = torch.tensor([]).to(dtype=torch.int64) + inst.gt_masks = BitMasks(torch.rand(0, h, w)) + return inst + + +def get_regular_bitmask_instances(h, w): + inst = Instances((h, w)) + inst.gt_boxes = Boxes(torch.rand(3, 4)) + inst.gt_boxes.tensor[:, 2:] += inst.gt_boxes.tensor[:, :2] + inst.gt_classes = torch.tensor([3, 4, 5]).to(dtype=torch.int64) + inst.gt_masks = BitMasks((torch.rand(3, h, w) > 0.5)) + return inst + + +class ModelE2ETest: + def setUp(self): + torch.manual_seed(43) + self.model = get_model_zoo(self.CONFIG_PATH) + + def _test_eval(self, input_sizes): + inputs = [create_model_input(torch.rand(3, s[0], s[1])) for s in input_sizes] + self.model.eval() + self.model(inputs) + + def _test_train(self, input_sizes, instances): + assert len(input_sizes) == len(instances) + inputs = [ + create_model_input(torch.rand(3, s[0], s[1]), inst) + for s, inst in zip(input_sizes, instances) + ] + self.model.train() + with EventStorage(): + losses = self.model(inputs) + sum(losses.values()).backward() + del losses + + def _inf_tensor(self, *shape): + return 1.0 / torch.zeros(*shape, device=self.model.device) + + def _nan_tensor(self, *shape): + return torch.zeros(*shape, device=self.model.device).fill_(float("nan")) + + def test_empty_data(self): + instances = [get_empty_instance(200, 250), get_empty_instance(200, 249)] + self._test_eval([(200, 250), (200, 249)]) + self._test_train([(200, 250), (200, 249)], instances) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable") + def test_eval_tocpu(self): + model = get_model_zoo(self.CONFIG_PATH).cpu() + model.eval() + input_sizes = [(200, 250), (200, 249)] + inputs = [create_model_input(torch.rand(3, s[0], s[1])) for s in input_sizes] + model(inputs) + + +class MaskRCNNE2ETest(ModelE2ETest, unittest.TestCase): + 
CONFIG_PATH = "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + + def test_half_empty_data(self): + instances = [get_empty_instance(200, 250), get_regular_bitmask_instances(200, 249)] + self._test_train([(200, 250), (200, 249)], instances) + + # This test is flaky because in some environment the output features are zero due to relu + # def test_rpn_inf_nan_data(self): + # self.model.eval() + # for tensor in [self._inf_tensor, self._nan_tensor]: + # images = ImageList(tensor(1, 3, 512, 512), [(510, 510)]) + # features = { + # "p2": tensor(1, 256, 256, 256), + # "p3": tensor(1, 256, 128, 128), + # "p4": tensor(1, 256, 64, 64), + # "p5": tensor(1, 256, 32, 32), + # "p6": tensor(1, 256, 16, 16), + # } + # props, _ = self.model.proposal_generator(images, features) + # self.assertEqual(len(props[0]), 0) + + def test_roiheads_inf_nan_data(self): + self.model.eval() + for tensor in [self._inf_tensor, self._nan_tensor]: + images = ImageList(tensor(1, 3, 512, 512), [(510, 510)]) + features = { + "p2": tensor(1, 256, 256, 256), + "p3": tensor(1, 256, 128, 128), + "p4": tensor(1, 256, 64, 64), + "p5": tensor(1, 256, 32, 32), + "p6": tensor(1, 256, 16, 16), + } + props = [Instances((510, 510))] + props[0].proposal_boxes = Boxes([[10, 10, 20, 20]]).to(device=self.model.device) + props[0].objectness_logits = torch.tensor([1.0]).reshape(1, 1) + det, _ = self.model.roi_heads(images, features, props) + self.assertEqual(len(det[0]), 0) + + +class RetinaNetE2ETest(ModelE2ETest, unittest.TestCase): + CONFIG_PATH = "COCO-Detection/retinanet_R_50_FPN_1x.yaml" + + def test_inf_nan_data(self): + self.model.eval() + self.model.score_threshold = -999999999 + for tensor in [self._inf_tensor, self._nan_tensor]: + images = ImageList(tensor(1, 3, 512, 512), [(510, 510)]) + features = [ + tensor(1, 256, 128, 128), + tensor(1, 256, 64, 64), + tensor(1, 256, 32, 32), + tensor(1, 256, 16, 16), + tensor(1, 256, 8, 8), + ] + anchors = self.model.anchor_generator(features) + box_cls, box_delta = self.model.head(features) + box_cls = [tensor(*k.shape) for k in box_cls] + box_delta = [tensor(*k.shape) for k in box_delta] + det = self.model.inference(box_cls, box_delta, anchors, images.image_sizes) + # all predictions (if any) are infinite or nan + if len(det[0]): + self.assertTrue(torch.isfinite(det[0].pred_boxes.tensor).sum() == 0) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_roi_heads.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_roi_heads.py new file mode 100644 index 0000000000000000000000000000000000000000..5a0630353ca1c2fbb33d2dee7ddb922d57cad3cd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_roi_heads.py @@ -0,0 +1,108 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import unittest +import torch + +from detectron2.config import get_cfg +from detectron2.modeling.backbone import build_backbone +from detectron2.modeling.proposal_generator.build import build_proposal_generator +from detectron2.modeling.roi_heads import build_roi_heads +from detectron2.structures import Boxes, ImageList, Instances, RotatedBoxes +from detectron2.utils.events import EventStorage + +logger = logging.getLogger(__name__) + + +class ROIHeadsTest(unittest.TestCase): + def test_roi_heads(self): + torch.manual_seed(121) + cfg = get_cfg() + cfg.MODEL.ROI_HEADS.NAME = "StandardROIHeads" + cfg.MODEL.ROI_BOX_HEAD.NAME = "FastRCNNConvFCHead" + cfg.MODEL.ROI_BOX_HEAD.NUM_FC = 2 + cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE = "ROIAlignV2" + cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10, 10, 5, 5) + backbone = build_backbone(cfg) + num_images = 2 + images_tensor = torch.rand(num_images, 20, 30) + image_sizes = [(10, 10), (20, 30)] + images = ImageList(images_tensor, image_sizes) + num_channels = 1024 + features = {"res4": torch.rand(num_images, num_channels, 1, 2)} + + image_shape = (15, 15) + gt_boxes0 = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32) + gt_instance0 = Instances(image_shape) + gt_instance0.gt_boxes = Boxes(gt_boxes0) + gt_instance0.gt_classes = torch.tensor([2, 1]) + gt_boxes1 = torch.tensor([[1, 5, 2, 8], [7, 3, 10, 5]], dtype=torch.float32) + gt_instance1 = Instances(image_shape) + gt_instance1.gt_boxes = Boxes(gt_boxes1) + gt_instance1.gt_classes = torch.tensor([1, 2]) + gt_instances = [gt_instance0, gt_instance1] + + proposal_generator = build_proposal_generator(cfg, backbone.output_shape()) + roi_heads = build_roi_heads(cfg, backbone.output_shape()) + + with EventStorage(): # capture events in a new storage to discard them + proposals, proposal_losses = proposal_generator(images, features, gt_instances) + _, detector_losses = roi_heads(images, features, proposals, gt_instances) + + expected_losses = { + "loss_cls": torch.tensor(4.4236516953), + "loss_box_reg": torch.tensor(0.0091214813), + } + for name in expected_losses.keys(): + self.assertTrue(torch.allclose(detector_losses[name], expected_losses[name])) + + def test_rroi_heads(self): + torch.manual_seed(121) + cfg = get_cfg() + cfg.MODEL.PROPOSAL_GENERATOR.NAME = "RRPN" + cfg.MODEL.ANCHOR_GENERATOR.NAME = "RotatedAnchorGenerator" + cfg.MODEL.ROI_HEADS.NAME = "RROIHeads" + cfg.MODEL.ROI_BOX_HEAD.NAME = "FastRCNNConvFCHead" + cfg.MODEL.ROI_BOX_HEAD.NUM_FC = 2 + cfg.MODEL.RPN.BBOX_REG_WEIGHTS = (1, 1, 1, 1, 1) + cfg.MODEL.RPN.HEAD_NAME = "StandardRPNHead" + cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE = "ROIAlignRotated" + cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10, 10, 5, 5, 1) + backbone = build_backbone(cfg) + num_images = 2 + images_tensor = torch.rand(num_images, 20, 30) + image_sizes = [(10, 10), (20, 30)] + images = ImageList(images_tensor, image_sizes) + num_channels = 1024 + features = {"res4": torch.rand(num_images, num_channels, 1, 2)} + + image_shape = (15, 15) + gt_boxes0 = torch.tensor([[2, 2, 2, 2, 30], [4, 4, 4, 4, 0]], dtype=torch.float32) + gt_instance0 = Instances(image_shape) + gt_instance0.gt_boxes = RotatedBoxes(gt_boxes0) + gt_instance0.gt_classes = torch.tensor([2, 1]) + gt_boxes1 = torch.tensor([[1.5, 5.5, 1, 3, 0], [8.5, 4, 3, 2, -50]], dtype=torch.float32) + gt_instance1 = Instances(image_shape) + gt_instance1.gt_boxes = RotatedBoxes(gt_boxes1) + gt_instance1.gt_classes = torch.tensor([1, 2]) + gt_instances = [gt_instance0, gt_instance1] + + 
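+        # Build the rotated proposal generator (RRPN) and rotated ROI heads from the config assembled above, then run one training forward pass inside an EventStorage to obtain the losses.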
proposal_generator = build_proposal_generator(cfg, backbone.output_shape()) + roi_heads = build_roi_heads(cfg, backbone.output_shape()) + + with EventStorage(): # capture events in a new storage to discard them + proposals, proposal_losses = proposal_generator(images, features, gt_instances) + _, detector_losses = roi_heads(images, features, proposals, gt_instances) + + expected_losses = { + "loss_cls": torch.tensor(4.381618499755859), + "loss_box_reg": torch.tensor(0.0011829272843897343), + } + for name in expected_losses.keys(): + err_msg = "detector_losses[{}] = {}, expected losses = {}".format( + name, detector_losses[name], expected_losses[name] + ) + self.assertTrue(torch.allclose(detector_losses[name], expected_losses[name]), err_msg) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_roi_pooler.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_roi_pooler.py new file mode 100644 index 0000000000000000000000000000000000000000..9aa3825c0196e4a6d89162e3d7c797e3d77b23bd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_roi_pooler.py @@ -0,0 +1,85 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import unittest +import torch + +from detectron2.modeling.poolers import ROIPooler +from detectron2.structures import Boxes, RotatedBoxes + +logger = logging.getLogger(__name__) + + +class TestROIPooler(unittest.TestCase): + def _rand_boxes(self, num_boxes, x_max, y_max): + coords = torch.rand(num_boxes, 4) + coords[:, 0] *= x_max + coords[:, 1] *= y_max + coords[:, 2] *= x_max + coords[:, 3] *= y_max + boxes = torch.zeros(num_boxes, 4) + boxes[:, 0] = torch.min(coords[:, 0], coords[:, 2]) + boxes[:, 1] = torch.min(coords[:, 1], coords[:, 3]) + boxes[:, 2] = torch.max(coords[:, 0], coords[:, 2]) + boxes[:, 3] = torch.max(coords[:, 1], coords[:, 3]) + return boxes + + def _test_roialignv2_roialignrotated_match(self, device): + pooler_resolution = 14 + canonical_level = 4 + canonical_scale_factor = 2 ** canonical_level + pooler_scales = (1.0 / canonical_scale_factor,) + sampling_ratio = 0 + + N, C, H, W = 2, 4, 10, 8 + N_rois = 10 + std = 11 + mean = 0 + feature = (torch.rand(N, C, H, W) - 0.5) * 2 * std + mean + + features = [feature.to(device)] + + rois = [] + rois_rotated = [] + for _ in range(N): + boxes = self._rand_boxes( + num_boxes=N_rois, x_max=W * canonical_scale_factor, y_max=H * canonical_scale_factor + ) + + rotated_boxes = torch.zeros(N_rois, 5) + rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0 + rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0 + rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0] + rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1] + rois.append(Boxes(boxes).to(device)) + rois_rotated.append(RotatedBoxes(rotated_boxes).to(device)) + + roialignv2_pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type="ROIAlignV2", + ) + + roialignv2_out = roialignv2_pooler(features, rois) + + roialignrotated_pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type="ROIAlignRotated", + ) + + roialignrotated_out = roialignrotated_pooler(features, rois_rotated) + + self.assertTrue(torch.allclose(roialignv2_out, roialignrotated_out, atol=1e-4)) + + def test_roialignv2_roialignrotated_match_cpu(self): + 
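+        # Axis-aligned boxes pooled with ROIAlignV2 should match the same boxes expressed as zero-angle RotatedBoxes pooled with ROIAlignRotated (see the helper above).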
self._test_roialignv2_roialignrotated_match(device="cpu") + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_roialignv2_roialignrotated_match_cuda(self): + self._test_roialignv2_roialignrotated_match(device="cuda") + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_rpn.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_rpn.py new file mode 100644 index 0000000000000000000000000000000000000000..967d2102b85f2d66e3f0b32b31805c4ac01afa0c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/modeling/test_rpn.py @@ -0,0 +1,234 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import logging +import unittest +import torch + +from detectron2.config import get_cfg +from detectron2.modeling.backbone import build_backbone +from detectron2.modeling.proposal_generator.build import build_proposal_generator +from detectron2.modeling.proposal_generator.rpn_outputs import find_top_rpn_proposals +from detectron2.structures import Boxes, ImageList, Instances, RotatedBoxes +from detectron2.utils.events import EventStorage + +logger = logging.getLogger(__name__) + + +class RPNTest(unittest.TestCase): + def test_rpn(self): + torch.manual_seed(121) + cfg = get_cfg() + cfg.MODEL.PROPOSAL_GENERATOR.NAME = "RPN" + cfg.MODEL.ANCHOR_GENERATOR.NAME = "DefaultAnchorGenerator" + cfg.MODEL.RPN.BBOX_REG_WEIGHTS = (1, 1, 1, 1) + backbone = build_backbone(cfg) + proposal_generator = build_proposal_generator(cfg, backbone.output_shape()) + num_images = 2 + images_tensor = torch.rand(num_images, 20, 30) + image_sizes = [(10, 10), (20, 30)] + images = ImageList(images_tensor, image_sizes) + image_shape = (15, 15) + num_channels = 1024 + features = {"res4": torch.rand(num_images, num_channels, 1, 2)} + gt_boxes = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32) + gt_instances = Instances(image_shape) + gt_instances.gt_boxes = Boxes(gt_boxes) + with EventStorage(): # capture events in a new storage to discard them + proposals, proposal_losses = proposal_generator( + images, features, [gt_instances[0], gt_instances[1]] + ) + + expected_losses = { + "loss_rpn_cls": torch.tensor(0.0804563984), + "loss_rpn_loc": torch.tensor(0.0990132466), + } + for name in expected_losses.keys(): + err_msg = "proposal_losses[{}] = {}, expected losses = {}".format( + name, proposal_losses[name], expected_losses[name] + ) + self.assertTrue(torch.allclose(proposal_losses[name], expected_losses[name]), err_msg) + + expected_proposal_boxes = [ + Boxes(torch.tensor([[0, 0, 10, 10], [7.3365392685, 0, 10, 10]])), + Boxes( + torch.tensor( + [ + [0, 0, 30, 20], + [0, 0, 16.7862777710, 13.1362524033], + [0, 0, 30, 13.3173446655], + [0, 0, 10.8602609634, 20], + [7.7165775299, 0, 27.3875980377, 20], + ] + ) + ), + ] + + expected_objectness_logits = [ + torch.tensor([0.1225359365, -0.0133192837]), + torch.tensor([0.1415634006, 0.0989848152, 0.0565387346, -0.0072308783, -0.0428492837]), + ] + + for proposal, expected_proposal_box, im_size, expected_objectness_logit in zip( + proposals, expected_proposal_boxes, image_sizes, expected_objectness_logits + ): + self.assertEqual(len(proposal), len(expected_proposal_box)) + self.assertEqual(proposal.image_size, im_size) + self.assertTrue( + torch.allclose(proposal.proposal_boxes.tensor, expected_proposal_box.tensor) + ) + 
self.assertTrue(torch.allclose(proposal.objectness_logits, expected_objectness_logit)) + + def test_rrpn(self): + torch.manual_seed(121) + cfg = get_cfg() + cfg.MODEL.PROPOSAL_GENERATOR.NAME = "RRPN" + cfg.MODEL.ANCHOR_GENERATOR.NAME = "RotatedAnchorGenerator" + cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64]] + cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.25, 1]] + cfg.MODEL.ANCHOR_GENERATOR.ANGLES = [[0, 60]] + cfg.MODEL.RPN.BBOX_REG_WEIGHTS = (1, 1, 1, 1, 1) + cfg.MODEL.RPN.HEAD_NAME = "StandardRPNHead" + backbone = build_backbone(cfg) + proposal_generator = build_proposal_generator(cfg, backbone.output_shape()) + num_images = 2 + images_tensor = torch.rand(num_images, 20, 30) + image_sizes = [(10, 10), (20, 30)] + images = ImageList(images_tensor, image_sizes) + image_shape = (15, 15) + num_channels = 1024 + features = {"res4": torch.rand(num_images, num_channels, 1, 2)} + gt_boxes = torch.tensor([[2, 2, 2, 2, 0], [4, 4, 4, 4, 0]], dtype=torch.float32) + gt_instances = Instances(image_shape) + gt_instances.gt_boxes = RotatedBoxes(gt_boxes) + with EventStorage(): # capture events in a new storage to discard them + proposals, proposal_losses = proposal_generator( + images, features, [gt_instances[0], gt_instances[1]] + ) + + expected_losses = { + "loss_rpn_cls": torch.tensor(0.043263837695121765), + "loss_rpn_loc": torch.tensor(0.14432406425476074), + } + for name in expected_losses.keys(): + err_msg = "proposal_losses[{}] = {}, expected losses = {}".format( + name, proposal_losses[name], expected_losses[name] + ) + self.assertTrue(torch.allclose(proposal_losses[name], expected_losses[name]), err_msg) + + expected_proposal_boxes = [ + RotatedBoxes( + torch.tensor( + [ + [0.60189795, 1.24095452, 61.98131943, 18.03621292, -4.07244873], + [15.64940453, 1.69624567, 59.59749603, 16.34339333, 2.62692475], + [-3.02982378, -2.69752932, 67.90952301, 59.62455750, 59.97010040], + [16.71863365, 1.98309708, 35.61507797, 32.81484985, 62.92267227], + [0.49432933, -7.92979717, 67.77606201, 62.93098450, -1.85656738], + [8.00880814, 1.36017394, 121.81007385, 32.74150467, 50.44297409], + [16.44299889, -4.82221127, 63.39775848, 61.22503662, 54.12270737], + [5.00000000, 5.00000000, 10.00000000, 10.00000000, -0.76943970], + [17.64130402, -0.98095351, 61.40377808, 16.28918839, 55.53118134], + [0.13016054, 4.60568953, 35.80157471, 32.30180359, 62.52872086], + [-4.26460743, 0.39604485, 124.30079651, 31.84611320, -1.58203125], + [7.52815342, -0.91636634, 62.39784622, 15.45565224, 60.79549789], + ] + ) + ), + RotatedBoxes( + torch.tensor( + [ + [0.07734215, 0.81635046, 65.33510590, 17.34688377, -1.51821899], + [-3.41833067, -3.11320257, 64.17595673, 60.55617905, 58.27033234], + [20.67383385, -6.16561556, 63.60531998, 62.52315903, 54.85546494], + [15.00000000, 10.00000000, 30.00000000, 20.00000000, -0.18218994], + [9.22646523, -6.84775209, 62.09895706, 65.46472931, -2.74307251], + [15.00000000, 4.93451595, 30.00000000, 9.86903191, -0.60272217], + [8.88342094, 2.65560246, 120.95362854, 32.45022202, 55.75970078], + [16.39088631, 2.33887148, 34.78761292, 35.61492920, 60.81977463], + [9.78298569, 10.00000000, 19.56597137, 20.00000000, -0.86660767], + [1.28576660, 5.49873352, 34.93610382, 33.22600174, 60.51599884], + [17.58912468, -1.63270092, 62.96052551, 16.45713997, 52.91245270], + [5.64749718, -1.90428460, 62.37649155, 16.19474792, 61.09543991], + [0.82255805, 2.34931135, 118.83985901, 32.83671188, 56.50753784], + [-5.33874989, 1.64404404, 125.28501892, 33.35424042, -2.80731201], + ] + ) + ), + ] + + 
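+        # Reference objectness logits for the two images, listed in the same (descending) order as the expected proposal boxes above.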
expected_objectness_logits = [ + torch.tensor( + [ + 0.10111768, + 0.09112845, + 0.08466332, + 0.07589971, + 0.06650183, + 0.06350251, + 0.04299347, + 0.01864817, + 0.00986163, + 0.00078543, + -0.04573630, + -0.04799230, + ] + ), + torch.tensor( + [ + 0.11373727, + 0.09377633, + 0.05281663, + 0.05143715, + 0.04040275, + 0.03250912, + 0.01307789, + 0.01177734, + 0.00038105, + -0.00540255, + -0.01194804, + -0.01461012, + -0.03061717, + -0.03599222, + ] + ), + ] + + torch.set_printoptions(precision=8, sci_mode=False) + + for proposal, expected_proposal_box, im_size, expected_objectness_logit in zip( + proposals, expected_proposal_boxes, image_sizes, expected_objectness_logits + ): + self.assertEqual(len(proposal), len(expected_proposal_box)) + self.assertEqual(proposal.image_size, im_size) + # It seems that there's some randomness in the result across different machines: + # This test can be run on a local machine for 100 times with exactly the same result, + # However, a different machine might produce slightly different results, + # thus the atol here. + err_msg = "computed proposal boxes = {}, expected {}".format( + proposal.proposal_boxes.tensor, expected_proposal_box.tensor + ) + self.assertTrue( + torch.allclose( + proposal.proposal_boxes.tensor, expected_proposal_box.tensor, atol=1e-5 + ), + err_msg, + ) + + err_msg = "computed objectness logits = {}, expected {}".format( + proposal.objectness_logits, expected_objectness_logit + ) + self.assertTrue( + torch.allclose(proposal.objectness_logits, expected_objectness_logit, atol=1e-5), + err_msg, + ) + + def test_rpn_proposals_inf(self): + N, Hi, Wi, A = 3, 3, 3, 3 + proposals = [torch.rand(N, Hi * Wi * A, 4)] + pred_logits = [torch.rand(N, Hi * Wi * A)] + pred_logits[0][1][3:5].fill_(float("inf")) + images = ImageList.from_tensors([torch.rand(3, 10, 10)] * 3) + find_top_rpn_proposals(proposals, pred_logits, images, 0.5, 1000, 1000, 0, False) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_boxes.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_boxes.py new file mode 100644 index 0000000000000000000000000000000000000000..4d33c3bf9b7471c7e4382bc9e66c26e1fb60e29f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_boxes.py @@ -0,0 +1,182 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import json +import math +import numpy as np +import unittest +import torch + +from detectron2.structures import Boxes, BoxMode, pairwise_iou + + +class TestBoxMode(unittest.TestCase): + def _convert_xy_to_wh(self, x): + return BoxMode.convert(x, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + + def _convert_xywha_to_xyxy(self, x): + return BoxMode.convert(x, BoxMode.XYWHA_ABS, BoxMode.XYXY_ABS) + + def _convert_xywh_to_xywha(self, x): + return BoxMode.convert(x, BoxMode.XYWH_ABS, BoxMode.XYWHA_ABS) + + def test_box_convert_list(self): + for tp in [list, tuple]: + box = tp([5.0, 5.0, 10.0, 10.0]) + output = self._convert_xy_to_wh(box) + self.assertIsInstance(output, tp) + self.assertIsInstance(output[0], float) + self.assertEqual(output, tp([5.0, 5.0, 5.0, 5.0])) + + with self.assertRaises(Exception): + self._convert_xy_to_wh([box]) + + def test_box_convert_array(self): + box = np.asarray([[5, 5, 10, 10], [1, 1, 2, 3]]) + output = self._convert_xy_to_wh(box) + self.assertEqual(output.dtype, box.dtype) + self.assertEqual(output.shape, box.shape) + self.assertTrue((output[0] == [5, 5, 5, 5]).all()) + self.assertTrue((output[1] == [1, 1, 1, 2]).all()) + + def test_box_convert_cpu_tensor(self): + box = torch.tensor([[5, 5, 10, 10], [1, 1, 2, 3]]) + output = self._convert_xy_to_wh(box) + self.assertEqual(output.dtype, box.dtype) + self.assertEqual(output.shape, box.shape) + output = output.numpy() + self.assertTrue((output[0] == [5, 5, 5, 5]).all()) + self.assertTrue((output[1] == [1, 1, 1, 2]).all()) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_box_convert_cuda_tensor(self): + box = torch.tensor([[5, 5, 10, 10], [1, 1, 2, 3]]).cuda() + output = self._convert_xy_to_wh(box) + self.assertEqual(output.dtype, box.dtype) + self.assertEqual(output.shape, box.shape) + self.assertEqual(output.device, box.device) + output = output.cpu().numpy() + self.assertTrue((output[0] == [5, 5, 5, 5]).all()) + self.assertTrue((output[1] == [1, 1, 1, 2]).all()) + + def test_box_convert_xywha_to_xyxy_list(self): + for tp in [list, tuple]: + box = tp([50, 50, 30, 20, 0]) + output = self._convert_xywha_to_xyxy(box) + self.assertIsInstance(output, tp) + self.assertEqual(output, tp([35, 40, 65, 60])) + + with self.assertRaises(Exception): + self._convert_xywha_to_xyxy([box]) + + def test_box_convert_xywha_to_xyxy_array(self): + for dtype in [np.float64, np.float32]: + box = np.asarray( + [ + [50, 50, 30, 20, 0], + [50, 50, 30, 20, 90], + [1, 1, math.sqrt(2), math.sqrt(2), -45], + ], + dtype=dtype, + ) + output = self._convert_xywha_to_xyxy(box) + self.assertEqual(output.dtype, box.dtype) + expected = np.asarray([[35, 40, 65, 60], [40, 35, 60, 65], [0, 0, 2, 2]], dtype=dtype) + self.assertTrue(np.allclose(output, expected, atol=1e-6), "output={}".format(output)) + + def test_box_convert_xywha_to_xyxy_tensor(self): + for dtype in [torch.float32, torch.float64]: + box = torch.tensor( + [ + [50, 50, 30, 20, 0], + [50, 50, 30, 20, 90], + [1, 1, math.sqrt(2), math.sqrt(2), -45], + ], + dtype=dtype, + ) + output = self._convert_xywha_to_xyxy(box) + self.assertEqual(output.dtype, box.dtype) + expected = torch.tensor([[35, 40, 65, 60], [40, 35, 60, 65], [0, 0, 2, 2]], dtype=dtype) + + self.assertTrue(torch.allclose(output, expected, atol=1e-6), "output={}".format(output)) + + def test_box_convert_xywh_to_xywha_list(self): + for tp in [list, tuple]: + box = tp([50, 50, 30, 20]) + output = self._convert_xywh_to_xywha(box) + self.assertIsInstance(output, tp) + 
self.assertEqual(output, tp([65, 60, 30, 20, 0])) + + with self.assertRaises(Exception): + self._convert_xywh_to_xywha([box]) + + def test_box_convert_xywh_to_xywha_array(self): + for dtype in [np.float64, np.float32]: + box = np.asarray([[30, 40, 70, 60], [30, 40, 60, 70], [-1, -1, 2, 2]], dtype=dtype) + output = self._convert_xywh_to_xywha(box) + self.assertEqual(output.dtype, box.dtype) + expected = np.asarray( + [[65, 70, 70, 60, 0], [60, 75, 60, 70, 0], [0, 0, 2, 2, 0]], dtype=dtype + ) + self.assertTrue(np.allclose(output, expected, atol=1e-6), "output={}".format(output)) + + def test_box_convert_xywh_to_xywha_tensor(self): + for dtype in [torch.float32, torch.float64]: + box = torch.tensor([[30, 40, 70, 60], [30, 40, 60, 70], [-1, -1, 2, 2]], dtype=dtype) + output = self._convert_xywh_to_xywha(box) + self.assertEqual(output.dtype, box.dtype) + expected = torch.tensor( + [[65, 70, 70, 60, 0], [60, 75, 60, 70, 0], [0, 0, 2, 2, 0]], dtype=dtype + ) + + self.assertTrue(torch.allclose(output, expected, atol=1e-6), "output={}".format(output)) + + def test_json_serializable(self): + payload = {"box_mode": BoxMode.XYWH_REL} + try: + json.dumps(payload) + except Exception: + self.fail("JSON serialization failed") + + def test_json_deserializable(self): + payload = '{"box_mode": 2}' + obj = json.loads(payload) + try: + obj["box_mode"] = BoxMode(obj["box_mode"]) + except Exception: + self.fail("JSON deserialization failed") + + +class TestBoxIOU(unittest.TestCase): + def test_pairwise_iou(self): + boxes1 = torch.tensor([[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 1.0, 1.0]]) + + boxes2 = torch.tensor( + [ + [0.0, 0.0, 1.0, 1.0], + [0.0, 0.0, 0.5, 1.0], + [0.0, 0.0, 1.0, 0.5], + [0.0, 0.0, 0.5, 0.5], + [0.5, 0.5, 1.0, 1.0], + [0.5, 0.5, 1.5, 1.5], + ] + ) + + expected_ious = torch.tensor( + [ + [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)], + [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)], + ] + ) + + ious = pairwise_iou(Boxes(boxes1), Boxes(boxes2)) + + self.assertTrue(torch.allclose(ious, expected_ious)) + + +class TestBoxes(unittest.TestCase): + def test_empty_cat(self): + x = Boxes.cat([]) + self.assertTrue(x.tensor.shape, (0, 4)) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_imagelist.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_imagelist.py new file mode 100644 index 0000000000000000000000000000000000000000..abeb35569ddc34a618735f4989dfbfae23d47bc1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_imagelist.py @@ -0,0 +1,38 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import unittest +from typing import Sequence +import torch + +from detectron2.structures import ImageList + + +class TestImageList(unittest.TestCase): + def test_imagelist_padding_shape(self): + class TensorToImageList(torch.nn.Module): + def forward(self, tensors: Sequence[torch.Tensor]): + return ImageList.from_tensors(tensors, 4).tensor + + func = torch.jit.trace( + TensorToImageList(), ([torch.ones((3, 10, 10), dtype=torch.float32)],) + ) + ret = func([torch.ones((3, 15, 20), dtype=torch.float32)]) + self.assertEqual(list(ret.shape), [1, 3, 16, 20], str(ret.shape)) + + func = torch.jit.trace( + TensorToImageList(), + ( + [ + torch.ones((3, 16, 10), dtype=torch.float32), + torch.ones((3, 13, 11), dtype=torch.float32), + ], + ), + ) + ret = func( + [ + torch.ones((3, 25, 20), dtype=torch.float32), + torch.ones((3, 10, 10), dtype=torch.float32), + ] + ) + # does not support calling with different #images + self.assertEqual(list(ret.shape), [2, 3, 28, 20], str(ret.shape)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_instances.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_instances.py new file mode 100644 index 0000000000000000000000000000000000000000..79c5249217633d3f144d02f14d11f32d1d4be7c9 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_instances.py @@ -0,0 +1,25 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import unittest +import torch + +from detectron2.structures import Instances + + +class TestInstancesIndexing(unittest.TestCase): + def test_int_indexing(self): + attr1 = torch.tensor([[0.0, 0.0, 1.0], [0.0, 0.0, 0.5], [0.0, 0.0, 1.0], [0.0, 0.5, 0.5]]) + attr2 = torch.tensor([0.1, 0.2, 0.3, 0.4]) + instances = Instances((100, 100)) + instances.attr1 = attr1 + instances.attr2 = attr2 + for i in range(-len(instances), len(instances)): + inst = instances[i] + self.assertEqual((inst.attr1 == attr1[i]).all(), True) + self.assertEqual((inst.attr2 == attr2[i]).all(), True) + + self.assertRaises(IndexError, lambda: instances[len(instances)]) + self.assertRaises(IndexError, lambda: instances[-len(instances) - 1]) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_rotated_boxes.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_rotated_boxes.py new file mode 100644 index 0000000000000000000000000000000000000000..575ac480e39d7406e55f4ff45b867e6f5c3796a0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/structures/test_rotated_boxes.py @@ -0,0 +1,357 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +from __future__ import absolute_import, division, print_function, unicode_literals +import logging +import math +import random +import unittest +import torch +from fvcore.common.benchmark import benchmark + +from detectron2.layers.rotated_boxes import pairwise_iou_rotated +from detectron2.structures.boxes import Boxes +from detectron2.structures.rotated_boxes import RotatedBoxes, pairwise_iou + +logger = logging.getLogger(__name__) + + +class TestRotatedBoxesLayer(unittest.TestCase): + def test_iou_0_dim_cpu(self): + boxes1 = torch.rand(0, 5, dtype=torch.float32) + boxes2 = torch.rand(10, 5, dtype=torch.float32) + expected_ious = torch.zeros(0, 10, dtype=torch.float32) + ious = pairwise_iou_rotated(boxes1, boxes2) + self.assertTrue(torch.allclose(ious, expected_ious)) + + boxes1 = torch.rand(10, 5, dtype=torch.float32) + boxes2 = torch.rand(0, 5, dtype=torch.float32) + expected_ious = torch.zeros(10, 0, dtype=torch.float32) + ious = pairwise_iou_rotated(boxes1, boxes2) + self.assertTrue(torch.allclose(ious, expected_ious)) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_iou_0_dim_cuda(self): + boxes1 = torch.rand(0, 5, dtype=torch.float32) + boxes2 = torch.rand(10, 5, dtype=torch.float32) + expected_ious = torch.zeros(0, 10, dtype=torch.float32) + ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda()) + self.assertTrue(torch.allclose(ious_cuda.cpu(), expected_ious)) + + boxes1 = torch.rand(10, 5, dtype=torch.float32) + boxes2 = torch.rand(0, 5, dtype=torch.float32) + expected_ious = torch.zeros(10, 0, dtype=torch.float32) + ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda()) + self.assertTrue(torch.allclose(ious_cuda.cpu(), expected_ious)) + + def test_iou_half_overlap_cpu(self): + boxes1 = torch.tensor([[0.5, 0.5, 1.0, 1.0, 0.0]], dtype=torch.float32) + boxes2 = torch.tensor([[0.25, 0.5, 0.5, 1.0, 0.0]], dtype=torch.float32) + expected_ious = torch.tensor([[0.5]], dtype=torch.float32) + ious = pairwise_iou_rotated(boxes1, boxes2) + self.assertTrue(torch.allclose(ious, expected_ious)) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_iou_half_overlap_cuda(self): + boxes1 = torch.tensor([[0.5, 0.5, 1.0, 1.0, 0.0]], dtype=torch.float32) + boxes2 = torch.tensor([[0.25, 0.5, 0.5, 1.0, 0.0]], dtype=torch.float32) + expected_ious = torch.tensor([[0.5]], dtype=torch.float32) + ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda()) + self.assertTrue(torch.allclose(ious_cuda.cpu(), expected_ious)) + + def test_iou_precision(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + boxes1 = torch.tensor([[565, 565, 10, 10.0, 0]], dtype=torch.float32, device=device) + boxes2 = torch.tensor([[565, 565, 10, 8.3, 0]], dtype=torch.float32, device=device) + iou = 8.3 / 10.0 + expected_ious = torch.tensor([[iou]], dtype=torch.float32) + ious = pairwise_iou_rotated(boxes1, boxes2) + self.assertTrue(torch.allclose(ious.cpu(), expected_ious)) + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def test_iou_too_many_boxes_cuda(self): + s1, s2 = 5, 1289035 + boxes1 = torch.zeros(s1, 5) + boxes2 = torch.zeros(s2, 5) + ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda()) + self.assertTupleEqual(tuple(ious_cuda.shape), (s1, s2)) + + def test_iou_extreme(self): + # Cause floating point issues in cuda kernels (#1266) + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + boxes1 = torch.tensor([[160.0, 153.0, 230.0, 23.0, 
-37.0]], device=device) + boxes2 = torch.tensor( + [ + [ + -1.117407639806935e17, + 1.3858420478349148e18, + 1000.0000610351562, + 1000.0000610351562, + 1612.0, + ] + ], + device=device, + ) + ious = pairwise_iou_rotated(boxes1, boxes2) + self.assertTrue(ious.min() >= 0, ious) + + +class TestRotatedBoxesStructure(unittest.TestCase): + def test_clip_area_0_degree(self): + for _ in range(50): + num_boxes = 100 + boxes_5d = torch.zeros(num_boxes, 5) + boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-100, 500) + boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-100, 500) + boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, 500) + boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, 500) + # Convert from (x_ctr, y_ctr, w, h, 0) to (x1, y1, x2, y2) + boxes_4d = torch.zeros(num_boxes, 4) + boxes_4d[:, 0] = boxes_5d[:, 0] - boxes_5d[:, 2] / 2.0 + boxes_4d[:, 1] = boxes_5d[:, 1] - boxes_5d[:, 3] / 2.0 + boxes_4d[:, 2] = boxes_5d[:, 0] + boxes_5d[:, 2] / 2.0 + boxes_4d[:, 3] = boxes_5d[:, 1] + boxes_5d[:, 3] / 2.0 + + image_size = (500, 600) + test_boxes_4d = Boxes(boxes_4d) + test_boxes_5d = RotatedBoxes(boxes_5d) + # Before clip + areas_4d = test_boxes_4d.area() + areas_5d = test_boxes_5d.area() + self.assertTrue(torch.allclose(areas_4d, areas_5d, atol=1e-1, rtol=1e-5)) + # After clip + test_boxes_4d.clip(image_size) + test_boxes_5d.clip(image_size) + areas_4d = test_boxes_4d.area() + areas_5d = test_boxes_5d.area() + self.assertTrue(torch.allclose(areas_4d, areas_5d, atol=1e-1, rtol=1e-5)) + + def test_clip_area_arbitrary_angle(self): + num_boxes = 100 + boxes_5d = torch.zeros(num_boxes, 5) + boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-100, 500) + boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-100, 500) + boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, 500) + boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, 500) + boxes_5d[:, 4] = torch.FloatTensor(num_boxes).uniform_(-1800, 1800) + clip_angle_threshold = random.uniform(0, 180) + + image_size = (500, 600) + test_boxes_5d = RotatedBoxes(boxes_5d) + # Before clip + areas_before = test_boxes_5d.area() + # After clip + test_boxes_5d.clip(image_size, clip_angle_threshold) + areas_diff = test_boxes_5d.area() - areas_before + + # the areas should only decrease after clipping + self.assertTrue(torch.all(areas_diff <= 0)) + # whenever the box is clipped (thus the area shrinks), + # the angle for the box must be within the clip_angle_threshold + # Note that the clip function will normalize the angle range + # to be within (-180, 180] + self.assertTrue( + torch.all(torch.abs(boxes_5d[:, 4][torch.where(areas_diff < 0)]) < clip_angle_threshold) + ) + + def test_normalize_angles(self): + # torch.manual_seed(0) + for _ in range(50): + num_boxes = 100 + boxes_5d = torch.zeros(num_boxes, 5) + boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-100, 500) + boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-100, 500) + boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, 500) + boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, 500) + boxes_5d[:, 4] = torch.FloatTensor(num_boxes).uniform_(-1800, 1800) + rotated_boxes = RotatedBoxes(boxes_5d) + normalized_boxes = rotated_boxes.clone() + normalized_boxes.normalize_angles() + self.assertTrue(torch.all(normalized_boxes.tensor[:, 4] >= -180)) + self.assertTrue(torch.all(normalized_boxes.tensor[:, 4] < 180)) + # x, y, w, h should not change + self.assertTrue(torch.allclose(boxes_5d[:, :4], normalized_boxes.tensor[:, :4])) + # the 
cos/sin values of the angles should stay the same + + self.assertTrue( + torch.allclose( + torch.cos(boxes_5d[:, 4] * math.pi / 180), + torch.cos(normalized_boxes.tensor[:, 4] * math.pi / 180), + atol=1e-5, + ) + ) + + self.assertTrue( + torch.allclose( + torch.sin(boxes_5d[:, 4] * math.pi / 180), + torch.sin(normalized_boxes.tensor[:, 4] * math.pi / 180), + atol=1e-5, + ) + ) + + def test_pairwise_iou_0_degree(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + boxes1 = torch.tensor( + [[0.5, 0.5, 1.0, 1.0, 0.0], [0.5, 0.5, 1.0, 1.0, 0.0]], + dtype=torch.float32, + device=device, + ) + boxes2 = torch.tensor( + [ + [0.5, 0.5, 1.0, 1.0, 0.0], + [0.25, 0.5, 0.5, 1.0, 0.0], + [0.5, 0.25, 1.0, 0.5, 0.0], + [0.25, 0.25, 0.5, 0.5, 0.0], + [0.75, 0.75, 0.5, 0.5, 0.0], + [1.0, 1.0, 1.0, 1.0, 0.0], + ], + dtype=torch.float32, + device=device, + ) + expected_ious = torch.tensor( + [ + [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)], + [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)], + ], + dtype=torch.float32, + device=device, + ) + ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2)) + self.assertTrue(torch.allclose(ious, expected_ious)) + + def test_pairwise_iou_45_degrees(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + boxes1 = torch.tensor( + [ + [1, 1, math.sqrt(2), math.sqrt(2), 45], + [1, 1, 2 * math.sqrt(2), 2 * math.sqrt(2), -45], + ], + dtype=torch.float32, + device=device, + ) + boxes2 = torch.tensor([[1, 1, 2, 2, 0]], dtype=torch.float32, device=device) + expected_ious = torch.tensor([[0.5], [0.5]], dtype=torch.float32, device=device) + ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2)) + self.assertTrue(torch.allclose(ious, expected_ious)) + + def test_pairwise_iou_orthogonal(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + boxes1 = torch.tensor([[5, 5, 10, 6, 55]], dtype=torch.float32, device=device) + boxes2 = torch.tensor([[5, 5, 10, 6, -35]], dtype=torch.float32, device=device) + iou = (6.0 * 6.0) / (6.0 * 6.0 + 4.0 * 6.0 + 4.0 * 6.0) + expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device) + ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2)) + self.assertTrue(torch.allclose(ious, expected_ious)) + + def test_pairwise_iou_large_close_boxes(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + boxes1 = torch.tensor( + [[299.500000, 417.370422, 600.000000, 364.259186, 27.1828]], + dtype=torch.float32, + device=device, + ) + boxes2 = torch.tensor( + [[299.500000, 417.370422, 600.000000, 364.259155, 27.1828]], + dtype=torch.float32, + device=device, + ) + iou = 364.259155 / 364.259186 + expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device) + ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2)) + self.assertTrue(torch.allclose(ious, expected_ious)) + + def test_pairwise_iou_many_boxes(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + num_boxes1 = 100 + num_boxes2 = 200 + boxes1 = torch.stack( + [ + torch.tensor( + [5 + 20 * i, 5 + 20 * i, 10, 10, 0], dtype=torch.float32, device=device + ) + for i in range(num_boxes1) + ] + ) + boxes2 = torch.stack( + [ + torch.tensor( + [5 + 20 * i, 5 + 20 * i, 10, 1 + 9 * i / num_boxes2, 0], + dtype=torch.float32, + device=device, + ) + for i in range(num_boxes2) + ] + ) + expected_ious = torch.zeros(num_boxes1, num_boxes2, dtype=torch.float32, device=device) + for i in range(min(num_boxes1, num_boxes2)): + 
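+                # The paired boxes share center and width and differ only in height, so the expected IoU reduces to the height ratio (1 + 9 * i / num_boxes2) / 10.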
expected_ious[i][i] = (1 + 9 * i / num_boxes2) / 10.0 + ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2)) + self.assertTrue(torch.allclose(ious, expected_ious)) + + def test_pairwise_iou_issue1207_simplified(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + # Simplified test case of D2-issue-1207 + boxes1 = torch.tensor([[3, 3, 8, 2, -45.0]], device=device) + boxes2 = torch.tensor([[6, 0, 8, 2, -45.0]], device=device) + iou = 0.0 + expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device) + + ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2)) + self.assertTrue(torch.allclose(ious, expected_ious)) + + def test_pairwise_iou_issue1207(self): + for device in ["cpu"] + ["cuda"] if torch.cuda.is_available() else []: + # The original test case in D2-issue-1207 + boxes1 = torch.tensor([[160.0, 153.0, 230.0, 23.0, -37.0]], device=device) + boxes2 = torch.tensor([[190.0, 127.0, 80.0, 21.0, -46.0]], device=device) + + iou = 0.0 + expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device) + + ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2)) + self.assertTrue(torch.allclose(ious, expected_ious)) + + def test_empty_cat(self): + x = RotatedBoxes.cat([]) + self.assertTrue(x.tensor.shape, (0, 5)) + + +def benchmark_rotated_iou(): + num_boxes1 = 200 + num_boxes2 = 500 + boxes1 = torch.stack( + [ + torch.tensor([5 + 20 * i, 5 + 20 * i, 10, 10, 0], dtype=torch.float32) + for i in range(num_boxes1) + ] + ) + boxes2 = torch.stack( + [ + torch.tensor( + [5 + 20 * i, 5 + 20 * i, 10, 1 + 9 * i / num_boxes2, 0], dtype=torch.float32 + ) + for i in range(num_boxes2) + ] + ) + + def func(dev, n=1): + b1 = boxes1.to(device=dev) + b2 = boxes2.to(device=dev) + + def bench(): + for _ in range(n): + pairwise_iou_rotated(b1, b2) + if dev.type == "cuda": + torch.cuda.synchronize() + + return bench + + # only run it once per timed loop, since it's slow + args = [{"dev": torch.device("cpu"), "n": 1}] + if torch.cuda.is_available(): + args.append({"dev": torch.device("cuda"), "n": 10}) + + benchmark(func, "rotated_iou", args, warmup_iters=3) + + +if __name__ == "__main__": + unittest.main() + benchmark_rotated_iou() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_checkpoint.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..725b488fdaec5d2b3a5c6d11c11d2c362453a2a4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_checkpoint.py @@ -0,0 +1,48 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import unittest +from collections import OrderedDict +import torch +from torch import nn + +from detectron2.checkpoint.c2_model_loading import align_and_update_state_dicts +from detectron2.utils.logger import setup_logger + + +class TestCheckpointer(unittest.TestCase): + def setUp(self): + setup_logger() + + def create_complex_model(self): + m = nn.Module() + m.block1 = nn.Module() + m.block1.layer1 = nn.Linear(2, 3) + m.layer2 = nn.Linear(3, 2) + m.res = nn.Module() + m.res.layer2 = nn.Linear(3, 2) + + state_dict = OrderedDict() + state_dict["layer1.weight"] = torch.rand(3, 2) + state_dict["layer1.bias"] = torch.rand(3) + state_dict["layer2.weight"] = torch.rand(2, 3) + state_dict["layer2.bias"] = torch.rand(2) + state_dict["res.layer2.weight"] = torch.rand(2, 3) + state_dict["res.layer2.bias"] = torch.rand(2) + return m, state_dict + + def test_complex_model_loaded(self): + for add_data_parallel in [False, True]: + model, state_dict = self.create_complex_model() + if add_data_parallel: + model = nn.DataParallel(model) + model_sd = model.state_dict() + + align_and_update_state_dicts(model_sd, state_dict) + for loaded, stored in zip(model_sd.values(), state_dict.values()): + # different tensor references + self.assertFalse(id(loaded) == id(stored)) + # same content + self.assertTrue(loaded.equal(stored)) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_config.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_config.py new file mode 100644 index 0000000000000000000000000000000000000000..650bdf2c42107c7031709653783cb2f3043e1bdf --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_config.py @@ -0,0 +1,240 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + + +import os +import tempfile +import unittest +import torch + +from detectron2.config import configurable, downgrade_config, get_cfg, upgrade_config +from detectron2.layers import ShapeSpec + +_V0_CFG = """ +MODEL: + RPN_HEAD: + NAME: "TEST" +VERSION: 0 +""" + +_V1_CFG = """ +MODEL: + WEIGHT: "/path/to/weight" +""" + + +class TestConfigVersioning(unittest.TestCase): + def test_upgrade_downgrade_consistency(self): + cfg = get_cfg() + # check that custom is preserved + cfg.USER_CUSTOM = 1 + + down = downgrade_config(cfg, to_version=0) + up = upgrade_config(down) + self.assertTrue(up == cfg) + + def _merge_cfg_str(self, cfg, merge_str): + f = tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) + try: + f.write(merge_str) + f.close() + cfg.merge_from_file(f.name) + finally: + os.remove(f.name) + return cfg + + def test_auto_upgrade(self): + cfg = get_cfg() + latest_ver = cfg.VERSION + cfg.USER_CUSTOM = 1 + + self._merge_cfg_str(cfg, _V0_CFG) + + self.assertEqual(cfg.MODEL.RPN.HEAD_NAME, "TEST") + self.assertEqual(cfg.VERSION, latest_ver) + + def test_guess_v1(self): + cfg = get_cfg() + latest_ver = cfg.VERSION + self._merge_cfg_str(cfg, _V1_CFG) + self.assertEqual(cfg.VERSION, latest_ver) + + +class _TestClassA(torch.nn.Module): + @configurable + def __init__(self, arg1, arg2, arg3=3): + super().__init__() + self.arg1 = arg1 + self.arg2 = arg2 + self.arg3 = arg3 + assert arg1 == 1 + assert arg2 == 2 + assert arg3 == 3 + + @classmethod + def from_config(cls, cfg): + args = {"arg1": cfg.ARG1, "arg2": cfg.ARG2} + return args + + +class _TestClassB(_TestClassA): + @configurable + def __init__(self, input_shape, arg1, arg2, arg3=3): + """ + Doc of _TestClassB + """ + assert input_shape == "shape" + super().__init__(arg1, arg2, arg3) + + @classmethod + def from_config(cls, cfg, input_shape): # test extra positional arg in from_config + args = {"arg1": cfg.ARG1, "arg2": cfg.ARG2} + args["input_shape"] = input_shape + return args + + +class _LegacySubClass(_TestClassB): + # an old subclass written in cfg style + def __init__(self, cfg, input_shape, arg4=4): + super().__init__(cfg, input_shape) + assert self.arg1 == 1 + assert self.arg2 == 2 + assert self.arg3 == 3 + + +class _NewSubClassNewInit(_TestClassB): + # test new subclass with a new __init__ + @configurable + def __init__(self, input_shape, arg4=4, **kwargs): + super().__init__(input_shape, **kwargs) + assert self.arg1 == 1 + assert self.arg2 == 2 + assert self.arg3 == 3 + + +class _LegacySubClassNotCfg(_TestClassB): + # an old subclass written in cfg style, but argument is not called "cfg" + def __init__(self, config, input_shape): + super().__init__(config, input_shape) + assert self.arg1 == 1 + assert self.arg2 == 2 + assert self.arg3 == 3 + + +class _TestClassC(_TestClassB): + @classmethod + def from_config(cls, cfg, input_shape, **kwargs): # test extra kwarg overwrite + args = {"arg1": cfg.ARG1, "arg2": cfg.ARG2} + args["input_shape"] = input_shape + args.update(kwargs) + return args + + +class _TestClassD(_TestClassA): + @configurable + def __init__(self, input_shape: ShapeSpec, arg1: int, arg2, arg3=3): + assert input_shape == "shape" + super().__init__(arg1, arg2, arg3) + + # _TestClassA.from_config does not have input_shape args. 
+ # Test whether input_shape will be forwarded to __init__ + + +class TestConfigurable(unittest.TestCase): + def testInitWithArgs(self): + _ = _TestClassA(arg1=1, arg2=2, arg3=3) + _ = _TestClassB("shape", arg1=1, arg2=2) + _ = _TestClassC("shape", arg1=1, arg2=2) + _ = _TestClassD("shape", arg1=1, arg2=2, arg3=3) + + def testPatchedAttr(self): + self.assertTrue("Doc" in _TestClassB.__init__.__doc__) + self.assertEqual(_TestClassD.__init__.__annotations__["arg1"], int) + + def testInitWithCfg(self): + cfg = get_cfg() + cfg.ARG1 = 1 + cfg.ARG2 = 2 + cfg.ARG3 = 3 + _ = _TestClassA(cfg) + _ = _TestClassB(cfg, input_shape="shape") + _ = _TestClassC(cfg, input_shape="shape") + _ = _TestClassD(cfg, input_shape="shape") + _ = _LegacySubClass(cfg, input_shape="shape") + _ = _NewSubClassNewInit(cfg, input_shape="shape") + _ = _LegacySubClassNotCfg(cfg, input_shape="shape") + with self.assertRaises(TypeError): + # disallow forwarding positional args to __init__ since it's prone to errors + _ = _TestClassD(cfg, "shape") + + # call with kwargs instead + _ = _TestClassA(cfg=cfg) + _ = _TestClassB(cfg=cfg, input_shape="shape") + _ = _TestClassC(cfg=cfg, input_shape="shape") + _ = _TestClassD(cfg=cfg, input_shape="shape") + _ = _LegacySubClass(cfg=cfg, input_shape="shape") + _ = _NewSubClassNewInit(cfg=cfg, input_shape="shape") + _ = _LegacySubClassNotCfg(config=cfg, input_shape="shape") + + def testInitWithCfgOverwrite(self): + cfg = get_cfg() + cfg.ARG1 = 1 + cfg.ARG2 = 999 # wrong config + with self.assertRaises(AssertionError): + _ = _TestClassA(cfg, arg3=3) + + # overwrite arg2 with correct config later: + _ = _TestClassA(cfg, arg2=2, arg3=3) + _ = _TestClassB(cfg, input_shape="shape", arg2=2, arg3=3) + _ = _TestClassC(cfg, input_shape="shape", arg2=2, arg3=3) + _ = _TestClassD(cfg, input_shape="shape", arg2=2, arg3=3) + + # call with kwargs cfg=cfg instead + _ = _TestClassA(cfg=cfg, arg2=2, arg3=3) + _ = _TestClassB(cfg=cfg, input_shape="shape", arg2=2, arg3=3) + _ = _TestClassC(cfg=cfg, input_shape="shape", arg2=2, arg3=3) + _ = _TestClassD(cfg=cfg, input_shape="shape", arg2=2, arg3=3) + + def testInitWithCfgWrongArgs(self): + cfg = get_cfg() + cfg.ARG1 = 1 + cfg.ARG2 = 2 + with self.assertRaises(TypeError): + _ = _TestClassB(cfg, "shape", not_exist=1) + with self.assertRaises(TypeError): + _ = _TestClassC(cfg, "shape", not_exist=1) + with self.assertRaises(TypeError): + _ = _TestClassD(cfg, "shape", not_exist=1) + + def testBadClass(self): + class _BadClass1: + @configurable + def __init__(self, a=1, b=2): + pass + + class _BadClass2: + @configurable + def __init__(self, a=1, b=2): + pass + + def from_config(self, cfg): # noqa + pass + + class _BadClass3: + @configurable + def __init__(self, a=1, b=2): + pass + + # bad name: must be cfg + @classmethod + def from_config(cls, config): # noqa + pass + + with self.assertRaises(AttributeError): + _ = _BadClass1(a=1) + + with self.assertRaises(TypeError): + _ = _BadClass2(a=1) + + with self.assertRaises(TypeError): + _ = _BadClass3(get_cfg()) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_export_caffe2.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_export_caffe2.py new file mode 100644 index 0000000000000000000000000000000000000000..ad989c4a3d11e6675d26ae2690f06d2ffe30d44c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_export_caffe2.py @@ -0,0 +1,71 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +# -*- coding: utf-8 -*- + +import copy +import numpy as np +import os +import tempfile +import unittest +import cv2 +import torch +from fvcore.common.file_io import PathManager + +from detectron2 import model_zoo +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import DatasetCatalog +from detectron2.modeling import build_model +from detectron2.utils.logger import setup_logger + + +@unittest.skipIf(os.environ.get("CIRCLECI"), "Require COCO data and model zoo.") +class TestCaffe2Export(unittest.TestCase): + def setUp(self): + setup_logger() + + def _test_model(self, config_path, device="cpu"): + # requires extra dependencies + from detectron2.export import Caffe2Model, add_export_config, export_caffe2_model + + cfg = get_cfg() + cfg.merge_from_file(model_zoo.get_config_file(config_path)) + cfg = add_export_config(cfg) + cfg.MODEL.DEVICE = device + + model = build_model(cfg) + DetectionCheckpointer(model).load(model_zoo.get_checkpoint_url(config_path)) + + inputs = [{"image": self._get_test_image()}] + c2_model = export_caffe2_model(cfg, model, copy.deepcopy(inputs)) + + with tempfile.TemporaryDirectory(prefix="detectron2_unittest") as d: + c2_model.save_protobuf(d) + c2_model.save_graph(os.path.join(d, "test.svg"), inputs=copy.deepcopy(inputs)) + c2_model = Caffe2Model.load_protobuf(d) + c2_model(inputs)[0]["instances"] + + def _get_test_image(self): + try: + file_name = DatasetCatalog.get("coco_2017_train")[0]["file_name"] + assert PathManager.exists(file_name) + except Exception: + self.skipTest("COCO dataset not available.") + + with PathManager.open(file_name, "rb") as f: + buf = f.read() + img = cv2.imdecode(np.frombuffer(buf, dtype=np.uint8), cv2.IMREAD_COLOR) + assert img is not None, file_name + return torch.from_numpy(img.transpose(2, 0, 1)) + + def testMaskRCNN(self): + self._test_model("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml") + + @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available") + def testMaskRCNNGPU(self): + self._test_model("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml", device="cuda") + + def testRetinaNet(self): + self._test_model("COCO-Detection/retinanet_R_50_FPN_3x.yaml") + + def testPanopticFPN(self): + self._test_model("COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml") diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_model_analysis.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_model_analysis.py new file mode 100644 index 0000000000000000000000000000000000000000..0e3f84c9354746fc634aca997abb232424ddebb2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_model_analysis.py @@ -0,0 +1,58 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
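+# Tests for the flop- and parameter-counting helpers in detectron2.utils.analysis, run on model-zoo configs without loading weights.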
+ + +import unittest +import torch + +import detectron2.model_zoo as model_zoo +from detectron2.config import get_cfg +from detectron2.modeling import build_model +from detectron2.utils.analysis import flop_count_operators, parameter_count + + +def get_model_zoo(config_path): + """ + Like model_zoo.get, but do not load any weights (even pretrained) + """ + cfg_file = model_zoo.get_config_file(config_path) + cfg = get_cfg() + cfg.merge_from_file(cfg_file) + if not torch.cuda.is_available(): + cfg.MODEL.DEVICE = "cpu" + return build_model(cfg) + + +class RetinaNetTest(unittest.TestCase): + def setUp(self): + self.model = get_model_zoo("COCO-Detection/retinanet_R_50_FPN_1x.yaml") + + def test_flop(self): + # RetinaNet supports flop-counting with random inputs + inputs = [{"image": torch.rand(3, 800, 800)}] + res = flop_count_operators(self.model, inputs) + self.assertTrue(int(res["conv"]), 146) # 146B flops + + def test_param_count(self): + res = parameter_count(self.model) + self.assertTrue(res[""], 37915572) + self.assertTrue(res["backbone"], 31452352) + + +class FasterRCNNTest(unittest.TestCase): + def setUp(self): + self.model = get_model_zoo("COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml") + + def test_flop(self): + # Faster R-CNN supports flop-counting with random inputs + inputs = [{"image": torch.rand(3, 800, 800)}] + res = flop_count_operators(self.model, inputs) + + # This only checks flops for backbone & proposal generator + # Flops for box head is not conv, and depends on #proposals, which is + # almost 0 for random inputs. + self.assertTrue(int(res["conv"]), 117) + + def test_param_count(self): + res = parameter_count(self.model) + self.assertTrue(res[""], 41699936) + self.assertTrue(res["backbone"], 26799296) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_model_zoo.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_model_zoo.py new file mode 100644 index 0000000000000000000000000000000000000000..2d16c711af2ab797dab04d0573c2ed70e071ebfd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_model_zoo.py @@ -0,0 +1,29 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +import logging +import unittest + +from detectron2 import model_zoo +from detectron2.modeling import FPN, GeneralizedRCNN + +logger = logging.getLogger(__name__) + + +class TestModelZoo(unittest.TestCase): + def test_get_returns_model(self): + model = model_zoo.get("Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml", trained=False) + self.assertIsInstance(model, GeneralizedRCNN) + self.assertIsInstance(model.backbone, FPN) + + def test_get_invalid_model(self): + self.assertRaises(RuntimeError, model_zoo.get, "Invalid/config.yaml") + + def test_get_url(self): + url = model_zoo.get_checkpoint_url("Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml") + self.assertEqual( + url, + "https://dl.fbaipublicfiles.com/detectron2/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn/138602908/model_final_01ca85.pkl", # noqa + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_visualizer.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_visualizer.py new file mode 100644 index 0000000000000000000000000000000000000000..1cdeddc6733e25d882bede48a404a1d52c0845de --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tests/test_visualizer.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# File: + +import numpy as np +import unittest +import torch + +from detectron2.data import MetadataCatalog +from detectron2.structures import BoxMode, Instances, RotatedBoxes +from detectron2.utils.visualizer import Visualizer + + +class TestVisualizer(unittest.TestCase): + def _random_data(self): + H, W = 100, 100 + N = 10 + img = np.random.rand(H, W, 3) * 255 + boxxy = np.random.rand(N, 2) * (H // 2) + boxes = np.concatenate((boxxy, boxxy + H // 2), axis=1) + + def _rand_poly(): + return np.random.rand(3, 2).flatten() * H + + polygons = [[_rand_poly() for _ in range(np.random.randint(1, 5))] for _ in range(N)] + + mask = np.zeros_like(img[:, :, 0], dtype=np.bool) + mask[:10, 10:20] = 1 + + labels = [str(i) for i in range(N)] + return img, boxes, labels, polygons, [mask] * N + + @property + def metadata(self): + return MetadataCatalog.get("coco_2017_train") + + def test_draw_dataset_dict(self): + img = np.random.rand(512, 512, 3) * 255 + dic = { + "annotations": [ + { + "bbox": [ + 368.9946492271106, + 330.891438763377, + 13.148537455410235, + 13.644708680142685, + ], + "bbox_mode": BoxMode.XYWH_ABS, + "category_id": 0, + "iscrowd": 1, + "segmentation": { + "counts": "_jh52m?2N2N2N2O100O10O001N1O2MceP2", + "size": [512, 512], + }, + } + ], + "height": 512, + "image_id": 1, + "width": 512, + } + v = Visualizer(img, self.metadata) + v.draw_dataset_dict(dic) + + def test_overlay_instances(self): + img, boxes, labels, polygons, masks = self._random_data() + + v = Visualizer(img, self.metadata) + output = v.overlay_instances(masks=polygons, boxes=boxes, labels=labels).get_image() + self.assertEqual(output.shape, img.shape) + + # Test 2x scaling + v = Visualizer(img, self.metadata, scale=2.0) + output = v.overlay_instances(masks=polygons, boxes=boxes, labels=labels).get_image() + self.assertEqual(output.shape[0], img.shape[0] * 2) + + # Test overlay masks + v = Visualizer(img, self.metadata) + output = v.overlay_instances(masks=masks, boxes=boxes, labels=labels).get_image() + self.assertEqual(output.shape, img.shape) + + def test_overlay_instances_no_boxes(self): + img, boxes, labels, polygons, _ = self._random_data() + v 
= Visualizer(img, self.metadata) + v.overlay_instances(masks=polygons, boxes=None, labels=labels).get_image() + + def test_draw_instance_predictions(self): + img, boxes, _, _, masks = self._random_data() + num_inst = len(boxes) + inst = Instances((img.shape[0], img.shape[1])) + inst.pred_classes = torch.randint(0, 80, size=(num_inst,)) + inst.scores = torch.rand(num_inst) + inst.pred_boxes = torch.from_numpy(boxes) + inst.pred_masks = torch.from_numpy(np.asarray(masks)) + + v = Visualizer(img, self.metadata) + v.draw_instance_predictions(inst) + + def test_draw_empty_mask_predictions(self): + img, boxes, _, _, masks = self._random_data() + num_inst = len(boxes) + inst = Instances((img.shape[0], img.shape[1])) + inst.pred_classes = torch.randint(0, 80, size=(num_inst,)) + inst.scores = torch.rand(num_inst) + inst.pred_boxes = torch.from_numpy(boxes) + inst.pred_masks = torch.from_numpy(np.zeros_like(np.asarray(masks))) + + v = Visualizer(img, self.metadata) + v.draw_instance_predictions(inst) + + def test_correct_output_shape(self): + img = np.random.rand(928, 928, 3) * 255 + v = Visualizer(img, self.metadata) + out = v.output.get_image() + self.assertEqual(out.shape, img.shape) + + def test_overlay_rotated_instances(self): + H, W = 100, 150 + img = np.random.rand(H, W, 3) * 255 + num_boxes = 50 + boxes_5d = torch.zeros(num_boxes, 5) + boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-0.1 * W, 1.1 * W) + boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-0.1 * H, 1.1 * H) + boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, max(W, H)) + boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, max(W, H)) + boxes_5d[:, 4] = torch.FloatTensor(num_boxes).uniform_(-1800, 1800) + rotated_boxes = RotatedBoxes(boxes_5d) + labels = [str(i) for i in range(num_boxes)] + + v = Visualizer(img, self.metadata) + output = v.overlay_instances(boxes=rotated_boxes, labels=labels).get_image() + self.assertEqual(output.shape, img.shape) + + def test_draw_no_metadata(self): + img, boxes, _, _, masks = self._random_data() + num_inst = len(boxes) + inst = Instances((img.shape[0], img.shape[1])) + inst.pred_classes = torch.randint(0, 80, size=(num_inst,)) + inst.scores = torch.rand(num_inst) + inst.pred_boxes = torch.from_numpy(boxes) + inst.pred_masks = torch.from_numpy(np.asarray(masks)) + + v = Visualizer(img, MetadataCatalog.get("asdfasdf")) + v.draw_instance_predictions(inst) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3733863970218bf8bdf9b32420163f4c858e209e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/README.md @@ -0,0 +1,45 @@ + +This directory contains a few scripts that use detectron2. + + +* `train_net.py` + +An example training script that's made to train builtin models of detectron2. + +For usage, see [GETTING_STARTED.md](../GETTING_STARTED.md). + +* `plain_train_net.py` + +Similar to `train_net.py`, but implements a training loop instead of using `Trainer`. +This script includes fewer features but it may be more friendly to hackers. + +* `benchmark.py` + +Benchmark the training speed, inference speed or data loading speed of a given config. 
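+For instance, to time just the data-loading pipeline of a Mask R-CNN config, an illustrative
+invocation (the config path is assumed to exist under `../configs`) is
+`python benchmark.py --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml --task data`;
+the general usage is shown below.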
+ +Usage: +``` +python benchmark.py --config-file config.yaml --task train/eval/data [optional DDP flags] +``` + +* `visualize_json_results.py` + +Visualize the json instance detection/segmentation results dumped by `COCOEvalutor` or `LVISEvaluator` + +Usage: +``` +python visualize_json_results.py --input x.json --output dir/ --dataset coco_2017_val +``` +If not using a builtin dataset, you'll need your own script or modify this script. + +* `visualize_data.py` + +Visualize ground truth raw annotations or training data (after preprocessing/augmentations). + +Usage: +``` +python visualize_data.py --config-file config.yaml --source annotation/dataloader --output-dir dir/ [--show] +``` + +NOTE: the script does not stop by itself when using `--source dataloader` because a training +dataloader is usually infinite. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/analyze_model.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/analyze_model.py new file mode 100755 index 0000000000000000000000000000000000000000..9c06ea4b5fbfd551d85702171976f9bc33f2e275 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/analyze_model.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import logging +import numpy as np +from collections import Counter +import tqdm + +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import build_detection_test_loader +from detectron2.engine import default_argument_parser +from detectron2.modeling import build_model +from detectron2.utils.analysis import ( + activation_count_operators, + flop_count_operators, + parameter_count_table, +) +from detectron2.utils.logger import setup_logger + +logger = logging.getLogger("detectron2") + + +def setup(args): + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.DATALOADER.NUM_WORKERS = 0 + cfg.merge_from_list(args.opts) + cfg.freeze() + setup_logger() + return cfg + + +def do_flop(cfg): + data_loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0]) + model = build_model(cfg) + DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS) + model.eval() + + counts = Counter() + total_flops = [] + for idx, data in zip(tqdm.trange(args.num_inputs), data_loader): # noqa + count = flop_count_operators(model, data) + counts += count + total_flops.append(sum(count.values())) + logger.info( + "(G)Flops for Each Type of Operators:\n" + str([(k, v / idx) for k, v in counts.items()]) + ) + logger.info("Total (G)Flops: {}±{}".format(np.mean(total_flops), np.std(total_flops))) + + +def do_activation(cfg): + data_loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0]) + model = build_model(cfg) + DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS) + model.eval() + + counts = Counter() + total_activations = [] + for idx, data in zip(tqdm.trange(args.num_inputs), data_loader): # noqa + count = activation_count_operators(model, data) + counts += count + total_activations.append(sum(count.values())) + logger.info( + "(Million) Activations for Each Type of Operators:\n" + + str([(k, v / idx) for k, v in counts.items()]) + ) + logger.info( + "Total (Million) Activations: {}±{}".format( + np.mean(total_activations), np.std(total_activations) + ) + ) + + +def do_parameter(cfg): + model = build_model(cfg) + logger.info("Parameter Count:\n" + parameter_count_table(model, max_depth=5)) + + +def do_structure(cfg): + 
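+    # Editor's note: unlike the flop/activation tasks above, this builds the model from the
+    # frozen config without loading any weights and simply logs its module hierarchy (repr).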
model = build_model(cfg) + logger.info("Model Structure:\n" + str(model)) + + +if __name__ == "__main__": + parser = default_argument_parser( + epilog=""" +Examples: + +To show parameters of a model: +$ ./analyze_model.py --tasks parameter \\ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml + +Flops and activations are data-dependent, therefore inputs and model weights +are needed to count them: + +$ ./analyze_model.py --num-inputs 100 --tasks flop \\ + --config-file ../configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml \\ + MODEL.WEIGHTS /path/to/model.pkl +""" + ) + parser.add_argument( + "--tasks", + choices=["flop", "activation", "parameter", "structure"], + required=True, + nargs="+", + ) + parser.add_argument( + "--num-inputs", + default=100, + type=int, + help="number of inputs used to compute statistics for flops/activations, " + "both are data dependent.", + ) + args = parser.parse_args() + assert not args.eval_only + assert args.num_gpus == 1 + + cfg = setup(args) + + for task in args.tasks: + { + "flop": do_flop, + "activation": do_activation, + "parameter": do_parameter, + "structure": do_structure, + }[task](cfg) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/benchmark.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/benchmark.py new file mode 100755 index 0000000000000000000000000000000000000000..9eec59f476882e4045ec3c682ffe515413a3be15 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/benchmark.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +A script to benchmark builtin models. + +Note: this script has an extra dependency of psutil. +""" + +import itertools +import logging +import psutil +import torch +import tqdm +from fvcore.common.timer import Timer +from torch.nn.parallel import DistributedDataParallel + +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import ( + DatasetFromList, + build_detection_test_loader, + build_detection_train_loader, +) +from detectron2.engine import SimpleTrainer, default_argument_parser, hooks, launch +from detectron2.modeling import build_model +from detectron2.solver import build_optimizer +from detectron2.utils import comm +from detectron2.utils.events import CommonMetricPrinter +from detectron2.utils.logger import setup_logger + +logger = logging.getLogger("detectron2") + + +def setup(args): + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.SOLVER.BASE_LR = 0.001 # Avoid NaNs. Not useful in this script anyway. 
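+    # Command-line overrides (args.opts) are merged after the tweak above, so SOLVER.BASE_LR
+    # can still be overridden explicitly from the command line.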
+ cfg.merge_from_list(args.opts) + cfg.freeze() + setup_logger(distributed_rank=comm.get_rank()) + return cfg + + +def benchmark_data(args): + cfg = setup(args) + + timer = Timer() + dataloader = build_detection_train_loader(cfg) + logger.info("Initialize loader using {} seconds.".format(timer.seconds())) + + timer.reset() + itr = iter(dataloader) + for i in range(10): # warmup + next(itr) + if i == 0: + startup_time = timer.seconds() + timer = Timer() + max_iter = 1000 + for _ in tqdm.trange(max_iter): + next(itr) + logger.info( + "{} iters ({} images) in {} seconds.".format( + max_iter, max_iter * cfg.SOLVER.IMS_PER_BATCH, timer.seconds() + ) + ) + logger.info("Startup time: {} seconds".format(startup_time)) + vram = psutil.virtual_memory() + logger.info( + "RAM Usage: {:.2f}/{:.2f} GB".format( + (vram.total - vram.available) / 1024 ** 3, vram.total / 1024 ** 3 + ) + ) + + # test for a few more rounds + for _ in range(10): + timer = Timer() + max_iter = 1000 + for _ in tqdm.trange(max_iter): + next(itr) + logger.info( + "{} iters ({} images) in {} seconds.".format( + max_iter, max_iter * cfg.SOLVER.IMS_PER_BATCH, timer.seconds() + ) + ) + + +def benchmark_train(args): + cfg = setup(args) + model = build_model(cfg) + logger.info("Model:\n{}".format(model)) + if comm.get_world_size() > 1: + model = DistributedDataParallel( + model, device_ids=[comm.get_local_rank()], broadcast_buffers=False + ) + optimizer = build_optimizer(cfg, model) + checkpointer = DetectionCheckpointer(model, optimizer=optimizer) + checkpointer.load(cfg.MODEL.WEIGHTS) + + cfg.defrost() + cfg.DATALOADER.NUM_WORKERS = 0 + data_loader = build_detection_train_loader(cfg) + dummy_data = list(itertools.islice(data_loader, 100)) + + def f(): + data = DatasetFromList(dummy_data, copy=False) + while True: + yield from data + + max_iter = 400 + trainer = SimpleTrainer(model, f(), optimizer) + trainer.register_hooks( + [hooks.IterationTimer(), hooks.PeriodicWriter([CommonMetricPrinter(max_iter)])] + ) + trainer.train(1, max_iter) + + +@torch.no_grad() +def benchmark_eval(args): + cfg = setup(args) + model = build_model(cfg) + model.eval() + logger.info("Model:\n{}".format(model)) + DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS) + + cfg.defrost() + cfg.DATALOADER.NUM_WORKERS = 0 + data_loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0]) + dummy_data = list(itertools.islice(data_loader, 100)) + + def f(): + while True: + yield from DatasetFromList(dummy_data, copy=False) + + for _ in range(5): # warmup + model(dummy_data[0]) + + max_iter = 400 + timer = Timer() + with tqdm.tqdm(total=max_iter) as pbar: + for idx, d in enumerate(f()): + if idx == max_iter: + break + model(d) + pbar.update() + logger.info("{} iters in {} seconds.".format(max_iter, timer.seconds())) + + +if __name__ == "__main__": + parser = default_argument_parser() + parser.add_argument("--task", choices=["train", "eval", "data"], required=True) + args = parser.parse_args() + assert not args.eval_only + + if args.task == "data": + f = benchmark_data + elif args.task == "train": + """ + Note: training speed may not be representative. + The training cost of a R-CNN model varies with the content of the data + and the quality of the model. + """ + f = benchmark_train + elif args.task == "eval": + f = benchmark_eval + # only benchmark single-GPU inference. 
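+        # (Only the eval task is restricted to a single process; launch() below spawns
+        # args.num_gpus worker processes per machine and runs the selected benchmark in each.)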
+ assert args.num_gpus == 1 and args.num_machines == 1 + launch(f, args.num_gpus, args.num_machines, args.machine_rank, args.dist_url, args=(args,)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/convert-torchvision-to-d2.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/convert-torchvision-to-d2.py new file mode 100755 index 0000000000000000000000000000000000000000..18a24e4ef96d34a4a0d1f43debc2276260da1a2b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/convert-torchvision-to-d2.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import pickle as pkl +import sys +import torch + +""" +Usage: + # download one of the ResNet{18,34,50,101,152} models from torchvision: + wget https://download.pytorch.org/models/resnet50-19c8e357.pth -O r50.pth + # run the conversion + ./convert-torchvision-to-d2.py r50.pth r50.pkl + + # Then, use r50.pkl with the following changes in config: + +MODEL: + WEIGHTS: "/path/to/r50.pkl" + PIXEL_MEAN: [123.675, 116.280, 103.530] + PIXEL_STD: [58.395, 57.120, 57.375] + RESNETS: + DEPTH: 50 + STRIDE_IN_1X1: False +INPUT: + FORMAT: "RGB" + + These models typically produce slightly worse results than the + pre-trained ResNets we use in official configs, which are the + original ResNet models released by MSRA. +""" + +if __name__ == "__main__": + input = sys.argv[1] + + obj = torch.load(input, map_location="cpu") + + newmodel = {} + for k in list(obj.keys()): + old_k = k + if "layer" not in k: + k = "stem." + k + for t in [1, 2, 3, 4]: + k = k.replace("layer{}".format(t), "res{}".format(t + 1)) + for t in [1, 2, 3]: + k = k.replace("bn{}".format(t), "conv{}.norm".format(t)) + k = k.replace("downsample.0", "shortcut") + k = k.replace("downsample.1", "shortcut.norm") + print(old_k, "->", k) + newmodel[k] = obj.pop(old_k).detach().numpy() + + res = {"model": newmodel, "__author__": "torchvision", "matching_heuristics": True} + + with open(sys.argv[2], "wb") as f: + pkl.dump(res, f) + if obj: + print("Unconverted keys:", obj.keys()) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/README.md b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b9d5b15512c0bd160accbb1823236b8954a37b86 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/README.md @@ -0,0 +1,9 @@ + +This directory contains: + +1. A script that converts a detectron2 model to caffe2 format. + +2. An example that loads a Mask R-CNN model in caffe2 format and runs inference. + +See [tutorial](https://detectron2.readthedocs.io/tutorials/deployment.html) +for their usage. diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/caffe2_converter.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/caffe2_converter.py new file mode 100755 index 0000000000000000000000000000000000000000..08feb69fba090a302d1624d52d146ac7a0787223 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/caffe2_converter.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. 
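+# Editor's note: this script wraps detectron2's Caffe2Tracer to export a trained model to
+# caffe2, onnx or torchscript (chosen via --format), and can optionally re-run COCO
+# evaluation on the converted caffe2 model when --run-eval is given.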
+import argparse +import os +import onnx +import torch + +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import build_detection_test_loader +from detectron2.evaluation import COCOEvaluator, inference_on_dataset, print_csv_format +from detectron2.export import Caffe2Tracer, add_export_config +from detectron2.modeling import build_model +from detectron2.utils.logger import setup_logger + + +def setup_cfg(args): + cfg = get_cfg() + # cuda context is initialized before creating dataloader, so we don't fork anymore + cfg.DATALOADER.NUM_WORKERS = 0 + cfg = add_export_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + if cfg.MODEL.DEVICE != "cpu": + TORCH_VERSION = tuple(int(x) for x in torch.__version__.split(".")[:2]) + assert TORCH_VERSION >= (1, 5), "PyTorch>=1.5 required for GPU conversion!" + return cfg + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Convert a model using caffe2 tracing.") + parser.add_argument( + "--format", + choices=["caffe2", "onnx", "torchscript"], + help="output format", + default="caffe2", + ) + parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file") + parser.add_argument("--run-eval", action="store_true") + parser.add_argument("--output", help="output directory for the converted model") + parser.add_argument( + "opts", + help="Modify config options using the command-line", + default=None, + nargs=argparse.REMAINDER, + ) + args = parser.parse_args() + logger = setup_logger() + logger.info("Command line arguments: " + str(args)) + os.makedirs(args.output, exist_ok=True) + + cfg = setup_cfg(args) + + # create a torch model + torch_model = build_model(cfg) + DetectionCheckpointer(torch_model).resume_or_load(cfg.MODEL.WEIGHTS) + + # get a sample data + data_loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0]) + first_batch = next(iter(data_loader)) + + # convert and save caffe2 model + tracer = Caffe2Tracer(cfg, torch_model, first_batch) + if args.format == "caffe2": + caffe2_model = tracer.export_caffe2() + caffe2_model.save_protobuf(args.output) + # draw the caffe2 graph + caffe2_model.save_graph(os.path.join(args.output, "model.svg"), inputs=first_batch) + elif args.format == "onnx": + onnx_model = tracer.export_onnx() + onnx.save(onnx_model, os.path.join(args.output, "model.onnx")) + elif args.format == "torchscript": + script_model = tracer.export_torchscript() + script_model.save(os.path.join(args.output, "model.ts")) + + # Recursively print IR of all modules + with open(os.path.join(args.output, "model_ts_IR.txt"), "w") as f: + try: + f.write(script_model._actual_script_module._c.dump_to_str(True, False, False)) + except AttributeError: + pass + # Print IR of the entire graph (all submodules inlined) + with open(os.path.join(args.output, "model_ts_IR_inlined.txt"), "w") as f: + f.write(str(script_model.inlined_graph)) + # Print the model structure in pytorch style + with open(os.path.join(args.output, "model.txt"), "w") as f: + f.write(str(script_model)) + + # run evaluation with the converted model + if args.run_eval: + assert args.format == "caffe2", "Python inference in other format is not yet supported." + dataset = cfg.DATASETS.TEST[0] + data_loader = build_detection_test_loader(cfg, dataset) + # NOTE: hard-coded evaluator. 
change to the evaluator for your dataset + evaluator = COCOEvaluator(dataset, cfg, True, args.output) + metrics = inference_on_dataset(caffe2_model, data_loader, evaluator) + print_csv_format(metrics) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/caffe2_mask_rcnn.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/caffe2_mask_rcnn.cpp new file mode 100644 index 0000000000000000000000000000000000000000..44370b4c518408f1f46345c7e3ac07c7db63a485 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/caffe2_mask_rcnn.cpp @@ -0,0 +1,119 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +C10_DEFINE_string(predict_net, "", "path to model.pb"); +C10_DEFINE_string(init_net, "", "path to model_init.pb"); +C10_DEFINE_string(input, "", "path to input image"); + +using namespace std; +using namespace caffe2; + +int main(int argc, char** argv) { + caffe2::GlobalInit(&argc, &argv); + string predictNetPath = FLAGS_predict_net; + string initNetPath = FLAGS_init_net; + cv::Mat input = cv::imread(FLAGS_input, cv::IMREAD_COLOR); + + const int height = input.rows; + const int width = input.cols; + // FPN models require divisibility of 32 + assert(height % 32 == 0 && width % 32 == 0); + const int batch = 1; + const int channels = 3; + + // initialize Net and Workspace + caffe2::NetDef initNet_, predictNet_; + CAFFE_ENFORCE(ReadProtoFromFile(initNetPath, &initNet_)); + CAFFE_ENFORCE(ReadProtoFromFile(predictNetPath, &predictNet_)); + + Workspace workSpace; + for (auto& str : predictNet_.external_input()) { + workSpace.CreateBlob(str); + } + CAFFE_ENFORCE(workSpace.CreateNet(predictNet_)); + CAFFE_ENFORCE(workSpace.RunNetOnce(initNet_)); + + // setup inputs + auto data = BlobGetMutableTensor(workSpace.GetBlob("data"), caffe2::CPU); + data->Resize(batch, channels, height, width); + float* ptr = data->mutable_data(); + // HWC to CHW + for (int c = 0; c < 3; ++c) { + for (int i = 0; i < height * width; ++i) { + ptr[c * height * width + i] = static_cast(input.data[3 * i + c]); + } + } + + auto im_info = + BlobGetMutableTensor(workSpace.GetBlob("im_info"), caffe2::CPU); + im_info->Resize(batch, 3); + float* im_info_ptr = im_info->mutable_data(); + im_info_ptr[0] = height; + im_info_ptr[1] = width; + im_info_ptr[2] = 1.0; + + // run the network + CAFFE_ENFORCE(workSpace.RunNet(predictNet_.name())); + + // run 3 more times to benchmark + int N_benchmark = 3; + auto start_time = chrono::high_resolution_clock::now(); + for (int i = 0; i < N_benchmark; ++i) { + CAFFE_ENFORCE(workSpace.RunNet(predictNet_.name())); + } + auto end_time = chrono::high_resolution_clock::now(); + auto ms = chrono::duration_cast(end_time - start_time) + .count(); + cout << "Latency (should vary with different inputs): " + << ms * 1.0 / 1e6 / N_benchmark << " seconds" << endl; + + // parse Mask R-CNN outputs + caffe2::Tensor bbox( + workSpace.GetBlob("bbox_nms")->Get(), caffe2::CPU); + caffe2::Tensor scores( + workSpace.GetBlob("score_nms")->Get(), caffe2::CPU); + caffe2::Tensor labels( + workSpace.GetBlob("class_nms")->Get(), caffe2::CPU); + caffe2::Tensor mask_probs( + workSpace.GetBlob("mask_fcn_probs")->Get(), caffe2::CPU); + cout << "bbox:" << bbox.DebugString() << endl; + cout << "scores:" << scores.DebugString() << endl; + cout << "labels:" << labels.DebugString() << endl; 
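+  // The exported model exposes its results as four blobs: bbox_nms (XYXY boxes), score_nms
+  // (confidences), class_nms (integer labels) and mask_fcn_probs (per-class 28x28 mask
+  // probabilities), which are sliced per instance in the loop below.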
+ cout << "mask_probs: " << mask_probs.DebugString() << endl; + + int num_instances = bbox.sizes()[0]; + for (int i = 0; i < num_instances; ++i) { + float score = scores.data()[i]; + if (score < 0.6) + continue; // skip them + + const float* box = bbox.data() + i * 4; + int label = labels.data()[i]; + + cout << "Prediction " << i << ", xyxy=("; + cout << box[0] << ", " << box[1] << ", " << box[2] << ", " << box[3] + << "); score=" << score << "; label=" << label << endl; + + const float* mask = mask_probs.data() + + i * mask_probs.size_from_dim(1) + label * mask_probs.size_from_dim(2); + + // save the 28x28 mask + cv::Mat cv_mask(28, 28, CV_32FC1); + memcpy(cv_mask.data, mask, 28 * 28 * sizeof(float)); + cv::imwrite("mask" + std::to_string(i) + ".png", cv_mask * 255.); + } + return 0; +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/torchscript_traced_mask_rcnn.cpp b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/torchscript_traced_mask_rcnn.cpp new file mode 100644 index 0000000000000000000000000000000000000000..82fbdb052fa53543920bf8169a05982005e30cc5 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/deploy/torchscript_traced_mask_rcnn.cpp @@ -0,0 +1,71 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +#include +#include +#include + +#include +#include + +using namespace std; + +// experimental. don't use +int main(int argc, const char* argv[]) { + if (argc != 3) { + return 1; + } + std::string image_file = argv[2]; + + torch::autograd::AutoGradMode guard(false); + auto module = torch::jit::load(argv[1]); + + assert(module.buffers().size() > 0); + // Assume that the entire model is on the same device. + // We just put input to this device. 
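+  // The forward call below passes a (image, im_info) tuple: a float NCHW tensor on this
+  // device plus a 1x3 im_info tensor of (height, width, scale); scale is fixed to 1.0 here.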
+ auto device = (*begin(module.buffers())).device(); + + cv::Mat input_img = cv::imread(image_file, cv::IMREAD_COLOR); + const int height = input_img.rows; + const int width = input_img.cols; + // FPN models require divisibility of 32 + assert(height % 32 == 0 && width % 32 == 0); + const int channels = 3; + + auto input = torch::from_blob( + input_img.data, {1, height, width, channels}, torch::kUInt8); + // NHWC to NCHW + input = input.to(device, torch::kFloat).permute({0, 3, 1, 2}).contiguous(); + + std::array im_info_data{height * 1.0f, width * 1.0f, 1.0f}; + auto im_info = torch::from_blob(im_info_data.data(), {1, 3}).to(device); + + // run the network + auto output = module.forward({std::make_tuple(input, im_info)}); + + // run 3 more times to benchmark + int N_benchmark = 3; + auto start_time = chrono::high_resolution_clock::now(); + for (int i = 0; i < N_benchmark; ++i) { + output = module.forward({std::make_tuple(input, im_info)}); + } + auto end_time = chrono::high_resolution_clock::now(); + auto ms = chrono::duration_cast(end_time - start_time) + .count(); + cout << "Latency (should vary with different inputs): " + << ms * 1.0 / 1e6 / N_benchmark << " seconds" << endl; + + auto outputs = output.toTuple()->elements(); + // parse Mask R-CNN outputs + auto bbox = outputs[0].toTensor(), scores = outputs[1].toTensor(), + labels = outputs[2].toTensor(), mask_probs = outputs[3].toTensor(); + + cout << "bbox: " << bbox.toString() << " " << bbox.sizes() << endl; + cout << "scores: " << scores.toString() << " " << scores.sizes() << endl; + cout << "labels: " << labels.toString() << " " << labels.sizes() << endl; + cout << "mask_probs: " << mask_probs.toString() << " " << mask_probs.sizes() + << endl; + + int num_instances = bbox.sizes()[0]; + cout << bbox << endl; + return 0; +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/finetune_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/finetune_net.py new file mode 100755 index 0000000000000000000000000000000000000000..3e521859f70b89da747b324375a5110d8663fdc7 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/finetune_net.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Detection Training Script. + +This scripts reads a given config file and runs the training or evaluation. +It is an entry point that is made to train standard models in detectron2. + +In order to let one script support training of many models, +this script contains logic that are specific to these built-in models and therefore +may not be suitable for your own project. +For example, your research project perhaps only needs a single "evaluator". + +Therefore, we recommend you to use detectron2 as an library and take +this file as an example of how to use the library. +You may want to write your own script with your data and other customizations. 
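+
+Unlike the stock train_net.py, this script also registers the CIHP and demo datasets in COCO
+instance format (see the register_coco_instances calls below); the relative annotation and
+image paths are assumed to exist before training or evaluation is launched.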
+""" + +import logging +import os +from collections import OrderedDict +import torch + +import detectron2.utils.comm as comm +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import MetadataCatalog +from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch +from detectron2.evaluation import ( + CityscapesInstanceEvaluator, + CityscapesSemSegEvaluator, + COCOEvaluator, + COCOPanopticEvaluator, + DatasetEvaluators, + LVISEvaluator, + PascalVOCDetectionEvaluator, + SemSegEvaluator, + verify_results, +) +from detectron2.modeling import GeneralizedRCNNWithTTA + +# Register Custom Dataset +from detectron2.data.datasets import register_coco_instances + +register_coco_instances("CIHP_train", {}, "../../data/msrcnn_finetune_annotations/CIHP_train.json", + "../../data/instance-level_human_parsing/Training/Images") +register_coco_instances("CIHP_val", {}, "../../data/msrcnn_finetune_annotations/CIHP_val.json", + "../../data/instance-level_human_parsing/Validation/Images") +register_coco_instances("demo_train", {}, "../../demo/annotations/demo_train.json", + "../../demo/img") +register_coco_instances("demo_val", {}, "../../demo/annotations/demo_val.json", + "../../demo/img") + + +class Trainer(DefaultTrainer): + """ + We use the "DefaultTrainer" which contains pre-defined default logic for + standard training workflow. They may not work for you, especially if you + are working on a new research project. In that case you can use the cleaner + "SimpleTrainer", or write your own training loop. You can use + "tools/plain_train_net.py" as an example. + """ + + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_folder=None): + """ + Create evaluator(s) for a given dataset. + This uses the special metadata "evaluator_type" associated with each builtin dataset. + For your own dataset, you can simply create an evaluator manually in your + script and do not have to worry about the hacky if-else logic here. + """ + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + evaluator_list = [] + evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type + if evaluator_type in ["sem_seg", "coco_panoptic_seg"]: + evaluator_list.append( + SemSegEvaluator( + dataset_name, + distributed=True, + num_classes=cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES, + ignore_label=cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE, + output_dir=output_folder, + ) + ) + if evaluator_type in ["coco", "coco_panoptic_seg"]: + evaluator_list.append(COCOEvaluator(dataset_name, cfg, True, output_folder)) + if evaluator_type == "coco_panoptic_seg": + evaluator_list.append(COCOPanopticEvaluator(dataset_name, output_folder)) + if evaluator_type == "cityscapes_instance": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." + return CityscapesInstanceEvaluator(dataset_name) + if evaluator_type == "cityscapes_sem_seg": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." 
+ return CityscapesSemSegEvaluator(dataset_name) + elif evaluator_type == "pascal_voc": + return PascalVOCDetectionEvaluator(dataset_name) + elif evaluator_type == "lvis": + return LVISEvaluator(dataset_name, cfg, True, output_folder) + if len(evaluator_list) == 0: + raise NotImplementedError( + "no Evaluator for the dataset {} with the type {}".format( + dataset_name, evaluator_type + ) + ) + elif len(evaluator_list) == 1: + return evaluator_list[0] + return DatasetEvaluators(evaluator_list) + + @classmethod + def test_with_TTA(cls, cfg, model): + logger = logging.getLogger("detectron2.trainer") + # In the end of training, run an evaluation with TTA + # Only support some R-CNN models. + logger.info("Running inference with test-time augmentation ...") + model = GeneralizedRCNNWithTTA(cfg, model) + evaluators = [ + cls.build_evaluator( + cfg, name, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA") + ) + for name in cfg.DATASETS.TEST + ] + res = cls.test(cfg, model, evaluators) + res = OrderedDict({k + "_TTA": v for k, v in res.items()}) + return res + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + model = Trainer.build_model(cfg) + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + res = Trainer.test(cfg, model) + if cfg.TEST.AUG.ENABLED: + res.update(Trainer.test_with_TTA(cfg, model)) + if comm.is_main_process(): + verify_results(cfg, res) + return res + + """ + If you'd like to do anything fancier than the standard training logic, + consider writing your own training loop (see plain_train_net.py) or + subclassing the trainer. + """ + trainer = Trainer(cfg) + trainer.resume_or_load(resume=False) + if cfg.TEST.AUG.ENABLED: + trainer.register_hooks( + [hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))] + ) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/inference.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/inference.sh new file mode 100644 index 0000000000000000000000000000000000000000..3b9d39ed92e9cb574ac4349f457a52a27c38aac3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/inference.sh @@ -0,0 +1,4 @@ +python finetune_net.py \ + --num-gpus 1 \ + --config-file ../configs/Misc/parsing_inference.yaml \ + --eval-only MODEL.WEIGHTS ./model_final.pth TEST.AUG.ENABLED False diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/plain_train_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/plain_train_net.py new file mode 100755 index 0000000000000000000000000000000000000000..52a0a281f84bb64fa49c7cb2122564146ee27752 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/plain_train_net.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Detectron2 training script with a plain training loop. 
+ +This script reads a given config file and runs the training or evaluation. +It is an entry point that is able to train standard models in detectron2. + +In order to let one script support training of many models, +this script contains logic that are specific to these built-in models and therefore +may not be suitable for your own project. +For example, your research project perhaps only needs a single "evaluator". + +Therefore, we recommend you to use detectron2 as a library and take +this file as an example of how to use the library. +You may want to write your own script with your data and other customizations. + +Compared to "train_net.py", this script supports fewer default features. +It also includes fewer abstraction, therefore is easier to add custom logic. +""" + +import logging +import os +from collections import OrderedDict +import torch +from torch.nn.parallel import DistributedDataParallel + +import detectron2.utils.comm as comm +from detectron2.checkpoint import DetectionCheckpointer, PeriodicCheckpointer +from detectron2.config import get_cfg +from detectron2.data import ( + MetadataCatalog, + build_detection_test_loader, + build_detection_train_loader, +) +from detectron2.engine import default_argument_parser, default_setup, launch +from detectron2.evaluation import ( + CityscapesInstanceEvaluator, + CityscapesSemSegEvaluator, + COCOEvaluator, + COCOPanopticEvaluator, + DatasetEvaluators, + LVISEvaluator, + PascalVOCDetectionEvaluator, + SemSegEvaluator, + inference_on_dataset, + print_csv_format, +) +from detectron2.modeling import build_model +from detectron2.solver import build_lr_scheduler, build_optimizer +from detectron2.utils.events import ( + CommonMetricPrinter, + EventStorage, + JSONWriter, + TensorboardXWriter, +) + +logger = logging.getLogger("detectron2") + + +def get_evaluator(cfg, dataset_name, output_folder=None): + """ + Create evaluator(s) for a given dataset. + This uses the special metadata "evaluator_type" associated with each builtin dataset. + For your own dataset, you can simply create an evaluator manually in your + script and do not have to worry about the hacky if-else logic here. + """ + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + evaluator_list = [] + evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type + if evaluator_type in ["sem_seg", "coco_panoptic_seg"]: + evaluator_list.append( + SemSegEvaluator( + dataset_name, + distributed=True, + num_classes=cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES, + ignore_label=cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE, + output_dir=output_folder, + ) + ) + if evaluator_type in ["coco", "coco_panoptic_seg"]: + evaluator_list.append(COCOEvaluator(dataset_name, cfg, True, output_folder)) + if evaluator_type == "coco_panoptic_seg": + evaluator_list.append(COCOPanopticEvaluator(dataset_name, output_folder)) + if evaluator_type == "cityscapes_instance": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." + return CityscapesInstanceEvaluator(dataset_name) + if evaluator_type == "cityscapes_sem_seg": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." 
+ return CityscapesSemSegEvaluator(dataset_name) + if evaluator_type == "pascal_voc": + return PascalVOCDetectionEvaluator(dataset_name) + if evaluator_type == "lvis": + return LVISEvaluator(dataset_name, cfg, True, output_folder) + if len(evaluator_list) == 0: + raise NotImplementedError( + "no Evaluator for the dataset {} with the type {}".format(dataset_name, evaluator_type) + ) + if len(evaluator_list) == 1: + return evaluator_list[0] + return DatasetEvaluators(evaluator_list) + + +def do_test(cfg, model): + results = OrderedDict() + for dataset_name in cfg.DATASETS.TEST: + data_loader = build_detection_test_loader(cfg, dataset_name) + evaluator = get_evaluator( + cfg, dataset_name, os.path.join(cfg.OUTPUT_DIR, "inference", dataset_name) + ) + results_i = inference_on_dataset(model, data_loader, evaluator) + results[dataset_name] = results_i + if comm.is_main_process(): + logger.info("Evaluation results for {} in csv format:".format(dataset_name)) + print_csv_format(results_i) + if len(results) == 1: + results = list(results.values())[0] + return results + + +def do_train(cfg, model, resume=False): + model.train() + optimizer = build_optimizer(cfg, model) + scheduler = build_lr_scheduler(cfg, optimizer) + + checkpointer = DetectionCheckpointer( + model, cfg.OUTPUT_DIR, optimizer=optimizer, scheduler=scheduler + ) + start_iter = ( + checkpointer.resume_or_load(cfg.MODEL.WEIGHTS, resume=resume).get("iteration", -1) + 1 + ) + max_iter = cfg.SOLVER.MAX_ITER + + periodic_checkpointer = PeriodicCheckpointer( + checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD, max_iter=max_iter + ) + + writers = ( + [ + CommonMetricPrinter(max_iter), + JSONWriter(os.path.join(cfg.OUTPUT_DIR, "metrics.json")), + TensorboardXWriter(cfg.OUTPUT_DIR), + ] + if comm.is_main_process() + else [] + ) + + # compared to "train_net.py", we do not support accurate timing and + # precise BN here, because they are not trivial to implement + data_loader = build_detection_train_loader(cfg) + logger.info("Starting training from iteration {}".format(start_iter)) + with EventStorage(start_iter) as storage: + for data, iteration in zip(data_loader, range(start_iter, max_iter)): + iteration = iteration + 1 + storage.step() + + loss_dict = model(data) + losses = sum(loss_dict.values()) + assert torch.isfinite(losses).all(), loss_dict + + loss_dict_reduced = {k: v.item() for k, v in comm.reduce_dict(loss_dict).items()} + losses_reduced = sum(loss for loss in loss_dict_reduced.values()) + if comm.is_main_process(): + storage.put_scalars(total_loss=losses_reduced, **loss_dict_reduced) + + optimizer.zero_grad() + losses.backward() + optimizer.step() + storage.put_scalar("lr", optimizer.param_groups[0]["lr"], smoothing_hint=False) + scheduler.step() + + if ( + cfg.TEST.EVAL_PERIOD > 0 + and iteration % cfg.TEST.EVAL_PERIOD == 0 + and iteration != max_iter + ): + do_test(cfg, model) + # Compared to "train_net.py", the test results are not dumped to EventStorage + comm.synchronize() + + if iteration - start_iter > 5 and (iteration % 20 == 0 or iteration == max_iter): + for writer in writers: + writer.write() + periodic_checkpointer.step(iteration) + + +def setup(args): + """ + Create configs and perform basic setups. 
+ """ + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup( + cfg, args + ) # if you don't like any of the default setup, write your own setup code + return cfg + + +def main(args): + cfg = setup(args) + + model = build_model(cfg) + logger.info("Model:\n{}".format(model)) + if args.eval_only: + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + return do_test(cfg, model) + + distributed = comm.get_world_size() > 1 + if distributed: + model = DistributedDataParallel( + model, device_ids=[comm.get_local_rank()], broadcast_buffers=False + ) + + do_train(cfg, model, resume=args.resume) + return do_test(cfg, model) + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/run.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..b89267337002df6eff52a323a07801fb1da6476c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/run.sh @@ -0,0 +1,3 @@ +python finetune_net.py \ + --config-file ../configs/Misc/parsing_finetune_cihp+vip.yaml \ + --num-gpus 8 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/train_net.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/train_net.py new file mode 100755 index 0000000000000000000000000000000000000000..b1c0ee443c81a0a0f217682cce6d9051ef07c20e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/train_net.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Detection Training Script. + +This scripts reads a given config file and runs the training or evaluation. +It is an entry point that is made to train standard models in detectron2. + +In order to let one script support training of many models, +this script contains logic that are specific to these built-in models and therefore +may not be suitable for your own project. +For example, your research project perhaps only needs a single "evaluator". + +Therefore, we recommend you to use detectron2 as an library and take +this file as an example of how to use the library. +You may want to write your own script with your data and other customizations. +""" + +import logging +import os +from collections import OrderedDict +import torch + +import detectron2.utils.comm as comm +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import get_cfg +from detectron2.data import MetadataCatalog +from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch +from detectron2.evaluation import ( + CityscapesInstanceEvaluator, + CityscapesSemSegEvaluator, + COCOEvaluator, + COCOPanopticEvaluator, + DatasetEvaluators, + LVISEvaluator, + PascalVOCDetectionEvaluator, + SemSegEvaluator, + verify_results, +) +from detectron2.modeling import GeneralizedRCNNWithTTA + + +class Trainer(DefaultTrainer): + """ + We use the "DefaultTrainer" which contains pre-defined default logic for + standard training workflow. 
They may not work for you, especially if you + are working on a new research project. In that case you can use the cleaner + "SimpleTrainer", or write your own training loop. You can use + "tools/plain_train_net.py" as an example. + """ + + @classmethod + def build_evaluator(cls, cfg, dataset_name, output_folder=None): + """ + Create evaluator(s) for a given dataset. + This uses the special metadata "evaluator_type" associated with each builtin dataset. + For your own dataset, you can simply create an evaluator manually in your + script and do not have to worry about the hacky if-else logic here. + """ + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + evaluator_list = [] + evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type + if evaluator_type in ["sem_seg", "coco_panoptic_seg"]: + evaluator_list.append( + SemSegEvaluator( + dataset_name, + distributed=True, + num_classes=cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES, + ignore_label=cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE, + output_dir=output_folder, + ) + ) + if evaluator_type in ["coco", "coco_panoptic_seg"]: + evaluator_list.append(COCOEvaluator(dataset_name, cfg, True, output_folder)) + if evaluator_type == "coco_panoptic_seg": + evaluator_list.append(COCOPanopticEvaluator(dataset_name, output_folder)) + if evaluator_type == "cityscapes_instance": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." + return CityscapesInstanceEvaluator(dataset_name) + if evaluator_type == "cityscapes_sem_seg": + assert ( + torch.cuda.device_count() >= comm.get_rank() + ), "CityscapesEvaluator currently do not work with multiple machines." + return CityscapesSemSegEvaluator(dataset_name) + elif evaluator_type == "pascal_voc": + return PascalVOCDetectionEvaluator(dataset_name) + elif evaluator_type == "lvis": + return LVISEvaluator(dataset_name, cfg, True, output_folder) + if len(evaluator_list) == 0: + raise NotImplementedError( + "no Evaluator for the dataset {} with the type {}".format( + dataset_name, evaluator_type + ) + ) + elif len(evaluator_list) == 1: + return evaluator_list[0] + return DatasetEvaluators(evaluator_list) + + @classmethod + def test_with_TTA(cls, cfg, model): + logger = logging.getLogger("detectron2.trainer") + # In the end of training, run an evaluation with TTA + # Only support some R-CNN models. + logger.info("Running inference with test-time augmentation ...") + model = GeneralizedRCNNWithTTA(cfg, model) + evaluators = [ + cls.build_evaluator( + cfg, name, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA") + ) + for name in cfg.DATASETS.TEST + ] + res = cls.test(cfg, model, evaluators) + res = OrderedDict({k + "_TTA": v for k, v in res.items()}) + return res + + +def setup(args): + """ + Create configs and perform basic setups. 
+ """ + cfg = get_cfg() + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + default_setup(cfg, args) + return cfg + + +def main(args): + cfg = setup(args) + + if args.eval_only: + model = Trainer.build_model(cfg) + DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load( + cfg.MODEL.WEIGHTS, resume=args.resume + ) + res = Trainer.test(cfg, model) + if cfg.TEST.AUG.ENABLED: + res.update(Trainer.test_with_TTA(cfg, model)) + if comm.is_main_process(): + verify_results(cfg, res) + return res + + """ + If you'd like to do anything fancier than the standard training logic, + consider writing your own training loop (see plain_train_net.py) or + subclassing the trainer. + """ + trainer = Trainer(cfg) + trainer.resume_or_load(resume=args.resume) + if cfg.TEST.AUG.ENABLED: + trainer.register_hooks( + [hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))] + ) + return trainer.train() + + +if __name__ == "__main__": + args = default_argument_parser().parse_args() + print("Command Line Args:", args) + launch( + main, + args.num_gpus, + num_machines=args.num_machines, + machine_rank=args.machine_rank, + dist_url=args.dist_url, + args=(args,), + ) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/visualize_data.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/visualize_data.py new file mode 100755 index 0000000000000000000000000000000000000000..b143b2d250787c2880657d42c9e9cc0c80c6a348 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/visualize_data.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import argparse +import os +from itertools import chain +import cv2 +import tqdm + +from detectron2.config import get_cfg +from detectron2.data import DatasetCatalog, MetadataCatalog, build_detection_train_loader +from detectron2.data import detection_utils as utils +from detectron2.data.build import filter_images_with_few_keypoints +from detectron2.utils.logger import setup_logger +from detectron2.utils.visualizer import Visualizer + + +def setup(args): + cfg = get_cfg() + if args.config_file: + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + cfg.freeze() + return cfg + + +def parse_args(in_args=None): + parser = argparse.ArgumentParser(description="Visualize ground-truth data") + parser.add_argument( + "--source", + choices=["annotation", "dataloader"], + required=True, + help="visualize the annotations or the data loader (with pre-processing)", + ) + parser.add_argument("--config-file", metavar="FILE", help="path to config file") + parser.add_argument("--output-dir", default="./", help="path to output directory") + parser.add_argument("--show", action="store_true", help="show output in a window") + parser.add_argument( + "opts", + help="Modify config options using the command-line", + default=None, + nargs=argparse.REMAINDER, + ) + return parser.parse_args(in_args) + + +if __name__ == "__main__": + args = parse_args() + logger = setup_logger() + logger.info("Arguments: " + str(args)) + cfg = setup(args) + + dirname = args.output_dir + os.makedirs(dirname, exist_ok=True) + metadata = MetadataCatalog.get(cfg.DATASETS.TRAIN[0]) + + def output(vis, fname): + if args.show: + print(fname) + cv2.imshow("window", vis.get_image()[:, :, ::-1]) + cv2.waitKey() + else: + filepath = os.path.join(dirname, fname) + print("Saving to {} ...".format(filepath)) + 
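+            # `vis` is the VisImage produced by the Visualizer calls further down;
+            # save() writes the rendered image to `filepath`.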
vis.save(filepath) + + scale = 2.0 if args.show else 1.0 + if args.source == "dataloader": + train_data_loader = build_detection_train_loader(cfg) + for batch in train_data_loader: + for per_image in batch: + # Pytorch tensor is in (C, H, W) format + img = per_image["image"].permute(1, 2, 0).cpu().detach().numpy() + img = utils.convert_image_to_rgb(img, cfg.INPUT.FORMAT) + + visualizer = Visualizer(img, metadata=metadata, scale=scale) + target_fields = per_image["instances"].get_fields() + labels = [metadata.thing_classes[i] for i in target_fields["gt_classes"]] + vis = visualizer.overlay_instances( + labels=labels, + boxes=target_fields.get("gt_boxes", None), + masks=target_fields.get("gt_masks", None), + keypoints=target_fields.get("gt_keypoints", None), + ) + output(vis, str(per_image["image_id"]) + ".jpg") + else: + dicts = list(chain.from_iterable([DatasetCatalog.get(k) for k in cfg.DATASETS.TRAIN])) + if cfg.MODEL.KEYPOINT_ON: + dicts = filter_images_with_few_keypoints(dicts, 1) + for dic in tqdm.tqdm(dicts): + img = utils.read_image(dic["file_name"], "RGB") + visualizer = Visualizer(img, metadata=metadata, scale=scale) + vis = visualizer.draw_dataset_dict(dic) + output(vis, os.path.basename(dic["file_name"])) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/visualize_json_results.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/visualize_json_results.py new file mode 100755 index 0000000000000000000000000000000000000000..d11ecb90382a630d90661bc65cefc4f8bf3486cf --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/detectron2/tools/visualize_json_results.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import argparse +import json +import numpy as np +import os +from collections import defaultdict +import cv2 +import tqdm +from fvcore.common.file_io import PathManager + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.structures import Boxes, BoxMode, Instances +from detectron2.utils.logger import setup_logger +from detectron2.utils.visualizer import Visualizer + + +def create_instances(predictions, image_size): + ret = Instances(image_size) + + score = np.asarray([x["score"] for x in predictions]) + chosen = (score > args.conf_threshold).nonzero()[0] + score = score[chosen] + bbox = np.asarray([predictions[i]["bbox"] for i in chosen]).reshape(-1, 4) + bbox = BoxMode.convert(bbox, BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) + + labels = np.asarray([dataset_id_map(predictions[i]["category_id"]) for i in chosen]) + + ret.scores = score + ret.pred_boxes = Boxes(bbox) + ret.pred_classes = labels + + try: + ret.pred_masks = [predictions[i]["segmentation"] for i in chosen] + except KeyError: + pass + return ret + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="A script that visualizes the json predictions from COCO or LVIS dataset." 
+ ) + parser.add_argument("--input", required=True, help="JSON file produced by the model") + parser.add_argument("--output", required=True, help="output directory") + parser.add_argument("--dataset", help="name of the dataset", default="coco_2017_val") + parser.add_argument("--conf-threshold", default=0.5, type=float, help="confidence threshold") + args = parser.parse_args() + + logger = setup_logger() + + with PathManager.open(args.input, "r") as f: + predictions = json.load(f) + + pred_by_image = defaultdict(list) + for p in predictions: + pred_by_image[p["image_id"]].append(p) + + dicts = list(DatasetCatalog.get(args.dataset)) + metadata = MetadataCatalog.get(args.dataset) + if hasattr(metadata, "thing_dataset_id_to_contiguous_id"): + + def dataset_id_map(ds_id): + return metadata.thing_dataset_id_to_contiguous_id[ds_id] + + elif "lvis" in args.dataset: + # LVIS results are in the same format as COCO results, but have a different + # mapping from dataset category id to contiguous category id in [0, #categories - 1] + def dataset_id_map(ds_id): + return ds_id - 1 + + else: + raise ValueError("Unsupported dataset: {}".format(args.dataset)) + + os.makedirs(args.output, exist_ok=True) + + for dic in tqdm.tqdm(dicts): + img = cv2.imread(dic["file_name"], cv2.IMREAD_COLOR)[:, :, ::-1] + basename = os.path.basename(dic["file_name"]) + + predictions = create_instances(pred_by_image[dic["image_id"]], img.shape[:2]) + vis = Visualizer(img, metadata) + vis_pred = vis.draw_instance_predictions(predictions).get_image() + + vis = Visualizer(img, metadata) + vis_gt = vis.draw_dataset_dict(dic).get_image() + + concat = np.concatenate((vis_pred, vis_gt), axis=1) + cv2.imwrite(os.path.join(args.output, basename), concat[:, :, ::-1]) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_datasets.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..8b00594ef3302af2a30440676f96a4904ffe9077 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_datasets.py @@ -0,0 +1,200 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : datasets.py +@Time : 8/4/19 3:35 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
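+@Note    : Defines CropDataSet and CropDataValSet, crop-level variants of the LIP-style
+           datasets consumed by the global-local parsing and evaluation scripts.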
+""" + +import os +import numpy as np +import random +import torch +import cv2 +from torch.utils import data +from utils.transforms import get_affine_transform + + +class CropDataSet(data.Dataset): + def __init__(self, root, split_name, crop_size=[473, 473], scale_factor=0.25, + rotation_factor=30, ignore_label=255, transform=None): + self.root = root + self.aspect_ratio = crop_size[1] * 1.0 / crop_size[0] + self.crop_size = np.asarray(crop_size) + self.ignore_label = ignore_label + self.scale_factor = scale_factor + self.rotation_factor = rotation_factor + self.flip_prob = 0.5 + self.transform = transform + self.split_name = split_name + + list_path = os.path.join(self.root, self.split_name + '.txt') + train_list = [i_id.strip() for i_id in open(list_path)] + + self.train_list = train_list + self.number_samples = len(self.train_list) + + def __len__(self): + return self.number_samples + + def _box2cs(self, box): + x, y, w, h = box[:4] + return self._xywh2cs(x, y, w, h) + + def _xywh2cs(self, x, y, w, h): + center = np.zeros((2), dtype=np.float32) + center[0] = x + w * 0.5 + center[1] = y + h * 0.5 + if w > self.aspect_ratio * h: + h = w * 1.0 / self.aspect_ratio + elif w < self.aspect_ratio * h: + w = h * self.aspect_ratio + scale = np.array([w * 1.0, h * 1.0], dtype=np.float32) + return center, scale + + def __getitem__(self, index): + train_item = self.train_list[index] + + im_path = os.path.join(self.root, self.split_name + '_images', train_item + '.jpg') + parsing_anno_path = os.path.join(self.root, self.split_name + '_segmentations', train_item + '.png') + + im = cv2.imread(im_path, cv2.IMREAD_COLOR) + h, w, _ = im.shape + parsing_anno = np.zeros((h, w), dtype=np.long) + + # Get person center and scale + person_center, s = self._box2cs([0, 0, w - 1, h - 1]) + r = 0 + + if self.split_name != 'test': + # Get pose annotation + parsing_anno = cv2.imread(parsing_anno_path, cv2.IMREAD_GRAYSCALE) + sf = self.scale_factor + rf = self.rotation_factor + s = s * np.clip(np.random.randn() * sf + 1, 1 - sf, 1 + sf) + r = np.clip(np.random.randn() * rf, -rf * 2, rf * 2) if random.random() <= 0.6 else 0 + + if random.random() <= self.flip_prob: + im = im[:, ::-1, :] + parsing_anno = parsing_anno[:, ::-1] + person_center[0] = im.shape[1] - person_center[0] - 1 + right_idx = [15, 17, 19] + left_idx = [14, 16, 18] + for i in range(0, 3): + right_pos = np.where(parsing_anno == right_idx[i]) + left_pos = np.where(parsing_anno == left_idx[i]) + parsing_anno[right_pos[0], right_pos[1]] = left_idx[i] + parsing_anno[left_pos[0], left_pos[1]] = right_idx[i] + + trans = get_affine_transform(person_center, s, r, self.crop_size) + input = cv2.warpAffine( + im, + trans, + (int(self.crop_size[1]), int(self.crop_size[0])), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0, 0, 0)) + + if self.transform: + input = self.transform(input) + + meta = { + 'name': train_item, + 'center': person_center, + 'height': h, + 'width': w, + 'scale': s, + 'rotation': r + } + + if self.split_name == 'val' or self.split_name == 'test': + return input, meta + else: + label_parsing = cv2.warpAffine( + parsing_anno, + trans, + (int(self.crop_size[1]), int(self.crop_size[0])), + flags=cv2.INTER_NEAREST, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(255)) + + label_parsing = torch.from_numpy(label_parsing) + + return input, label_parsing, meta + + +class CropDataValSet(data.Dataset): + def __init__(self, root, split_name='crop_pic', crop_size=[473, 473], transform=None, flip=False): + self.root = root + 
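+        # Note: root and crop_size are assigned again a few lines below; the later
+        # np.asarray(crop_size) is the value actually passed to get_affine_transform.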
self.crop_size = crop_size + self.transform = transform + self.flip = flip + self.split_name = split_name + self.root = root + self.aspect_ratio = crop_size[1] * 1.0 / crop_size[0] + self.crop_size = np.asarray(crop_size) + + list_path = os.path.join(self.root, self.split_name + '.txt') + val_list = [i_id.strip() for i_id in open(list_path)] + + self.val_list = val_list + self.number_samples = len(self.val_list) + + def __len__(self): + return len(self.val_list) + + def _box2cs(self, box): + x, y, w, h = box[:4] + return self._xywh2cs(x, y, w, h) + + def _xywh2cs(self, x, y, w, h): + center = np.zeros((2), dtype=np.float32) + center[0] = x + w * 0.5 + center[1] = y + h * 0.5 + if w > self.aspect_ratio * h: + h = w * 1.0 / self.aspect_ratio + elif w < self.aspect_ratio * h: + w = h * self.aspect_ratio + scale = np.array([w * 1.0, h * 1.0], dtype=np.float32) + + return center, scale + + def __getitem__(self, index): + val_item = self.val_list[index] + # Load training image + im_path = os.path.join(self.root, self.split_name, val_item + '.jpg') + im = cv2.imread(im_path, cv2.IMREAD_COLOR) + h, w, _ = im.shape + # Get person center and scale + person_center, s = self._box2cs([0, 0, w - 1, h - 1]) + r = 0 + trans = get_affine_transform(person_center, s, r, self.crop_size) + input = cv2.warpAffine( + im, + trans, + (int(self.crop_size[1]), int(self.crop_size[0])), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0, 0, 0)) + input = self.transform(input) + flip_input = input.flip(dims=[-1]) + if self.flip: + batch_input_im = torch.stack([input, flip_input]) + else: + batch_input_im = input + + meta = { + 'name': val_item, + 'center': person_center, + 'height': h, + 'width': w, + 'scale': s, + 'rotation': r + } + + return batch_input_im, meta diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_evaluate.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_evaluate.py new file mode 100644 index 0000000000000000000000000000000000000000..288e3c8214f945d5a4f5fc6824b74b3d42e037b2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_evaluate.py @@ -0,0 +1,210 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : evaluate.py +@Time : 8/4/19 3:36 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import os +import argparse +import numpy as np +import torch + +from torch.utils import data +from tqdm import tqdm +from PIL import Image as PILImage +import torchvision.transforms as transforms +import torch.backends.cudnn as cudnn + +import networks +from utils.miou import compute_mean_ioU +from utils.transforms import BGR2RGB_transform +from utils.transforms import transform_parsing, transform_logits +from mhp_extension.global_local_parsing.global_local_datasets import CropDataValSet + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + + Returns: + A list of parsed arguments. 
+ """ + parser = argparse.ArgumentParser(description="Self Correction for Human Parsing") + + # Network Structure + parser.add_argument("--arch", type=str, default='resnet101') + # Data Preference + parser.add_argument("--data-dir", type=str, default='./data/LIP') + parser.add_argument("--batch-size", type=int, default=1) + parser.add_argument("--split-name", type=str, default='crop_pic') + parser.add_argument("--input-size", type=str, default='473,473') + parser.add_argument("--num-classes", type=int, default=20) + parser.add_argument("--ignore-label", type=int, default=255) + parser.add_argument("--random-mirror", action="store_true") + parser.add_argument("--random-scale", action="store_true") + # Evaluation Preference + parser.add_argument("--log-dir", type=str, default='./log') + parser.add_argument("--model-restore", type=str, default='./log/checkpoint.pth.tar') + parser.add_argument("--gpu", type=str, default='0', help="choose gpu device.") + parser.add_argument("--save-results", action="store_true", help="whether to save the results.") + parser.add_argument("--flip", action="store_true", help="random flip during the test.") + parser.add_argument("--multi-scales", type=str, default='1', help="multiple scales during the test") + return parser.parse_args() + + +def get_palette(num_cls): + """ Returns the color map for visualizing the segmentation mask. + Args: + num_cls: Number of classes + Returns: + The color map + """ + n = num_cls + palette = [0] * (n * 3) + for j in range(0, n): + lab = j + palette[j * 3 + 0] = 0 + palette[j * 3 + 1] = 0 + palette[j * 3 + 2] = 0 + i = 0 + while lab: + palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) + palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) + palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) + i += 1 + lab >>= 3 + return palette + + +def multi_scale_testing(model, batch_input_im, crop_size=[473, 473], flip=True, multi_scales=[1]): + flipped_idx = (15, 14, 17, 16, 19, 18) + if len(batch_input_im.shape) > 4: + batch_input_im = batch_input_im.squeeze() + if len(batch_input_im.shape) == 3: + batch_input_im = batch_input_im.unsqueeze(0) + + interp = torch.nn.Upsample(size=crop_size, mode='bilinear', align_corners=True) + ms_outputs = [] + for s in multi_scales: + interp_im = torch.nn.Upsample(scale_factor=s, mode='bilinear', align_corners=True) + scaled_im = interp_im(batch_input_im) + parsing_output = model(scaled_im) + parsing_output = parsing_output[0][-1] + output = parsing_output[0] + if flip: + flipped_output = parsing_output[1] + flipped_output[14:20, :, :] = flipped_output[flipped_idx, :, :] + output += flipped_output.flip(dims=[-1]) + output *= 0.5 + output = interp(output.unsqueeze(0)) + ms_outputs.append(output[0]) + ms_fused_parsing_output = torch.stack(ms_outputs) + ms_fused_parsing_output = ms_fused_parsing_output.mean(0) + ms_fused_parsing_output = ms_fused_parsing_output.permute(1, 2, 0) # HWC + parsing = torch.argmax(ms_fused_parsing_output, dim=2) + parsing = parsing.data.cpu().numpy() + ms_fused_parsing_output = ms_fused_parsing_output.data.cpu().numpy() + return parsing, ms_fused_parsing_output + + +def main(): + """Create the model and start the evaluation process.""" + args = get_arguments() + multi_scales = [float(i) for i in args.multi_scales.split(',')] + gpus = [int(i) for i in args.gpu.split(',')] + assert len(gpus) == 1 + if not args.gpu == 'None': + os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu + + cudnn.benchmark = True + cudnn.enabled = True + + h, w = map(int, args.input_size.split(',')) + input_size = 
[h, w] + + model = networks.init_model(args.arch, num_classes=args.num_classes, pretrained=None) + + IMAGE_MEAN = model.mean + IMAGE_STD = model.std + INPUT_SPACE = model.input_space + print('image mean: {}'.format(IMAGE_MEAN)) + print('image std: {}'.format(IMAGE_STD)) + print('input space:{}'.format(INPUT_SPACE)) + if INPUT_SPACE == 'BGR': + print('BGR Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + + ]) + if INPUT_SPACE == 'RGB': + print('RGB Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + BGR2RGB_transform(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + ]) + + # Data loader + lip_test_dataset = CropDataValSet(args.data_dir, args.split_name, crop_size=input_size, transform=transform, + flip=args.flip) + num_samples = len(lip_test_dataset) + print('Totoal testing sample numbers: {}'.format(num_samples)) + testloader = data.DataLoader(lip_test_dataset, batch_size=args.batch_size, shuffle=False, pin_memory=True) + + # Load model weight + state_dict = torch.load(args.model_restore) + from collections import OrderedDict + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + name = k[7:] # remove `module.` + new_state_dict[name] = v + model.load_state_dict(new_state_dict) + model.cuda() + model.eval() + + sp_results_dir = os.path.join(args.log_dir, args.split_name + '_parsing') + if not os.path.exists(sp_results_dir): + os.makedirs(sp_results_dir) + + palette = get_palette(20) + parsing_preds = [] + scales = np.zeros((num_samples, 2), dtype=np.float32) + centers = np.zeros((num_samples, 2), dtype=np.int32) + with torch.no_grad(): + for idx, batch in enumerate(tqdm(testloader)): + image, meta = batch + if (len(image.shape) > 4): + image = image.squeeze() + im_name = meta['name'][0] + c = meta['center'].numpy()[0] + s = meta['scale'].numpy()[0] + w = meta['width'].numpy()[0] + h = meta['height'].numpy()[0] + scales[idx, :] = s + centers[idx, :] = c + parsing, logits = multi_scale_testing(model, image.cuda(), crop_size=input_size, flip=args.flip, + multi_scales=multi_scales) + if args.save_results: + parsing_result = transform_parsing(parsing, c, s, w, h, input_size) + parsing_result_path = os.path.join(sp_results_dir, im_name + '.png') + output_im = PILImage.fromarray(np.asarray(parsing_result, dtype=np.uint8)) + output_im.putpalette(palette) + output_im.save(parsing_result_path) + # save logits + logits_result = transform_logits(logits, c, s, w, h, input_size) + logits_result_path = os.path.join(sp_results_dir, im_name + '.npy') + np.save(logits_result_path, logits_result) + return + + +if __name__ == '__main__': + main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_train.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_train.py new file mode 100644 index 0000000000000000000000000000000000000000..810b1dbbbc0bbc489830903770cc4d627e16c218 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/global_local_train.py @@ -0,0 +1,232 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : train.py +@Time : 8/4/19 3:36 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
+""" + +import os +import json +import timeit +import argparse + +import torch +import torch.optim as optim +import torchvision.transforms as transforms +import torch.backends.cudnn as cudnn +from torch.utils import data + +import networks +import utils.schp as schp +from datasets.datasets import LIPDataSet +from datasets.target_generation import generate_edge_tensor +from utils.transforms import BGR2RGB_transform +from utils.criterion import CriterionAll +from utils.encoding import DataParallelModel, DataParallelCriterion +from utils.warmup_scheduler import SGDRScheduler + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + Returns: + A list of parsed arguments. + """ + parser = argparse.ArgumentParser(description="Self Correction for Human Parsing") + + # Network Structure + parser.add_argument("--arch", type=str, default='resnet101') + # Data Preference + parser.add_argument("--data-dir", type=str, default='./data/LIP') + parser.add_argument("--batch-size", type=int, default=16) + parser.add_argument("--input-size", type=str, default='473,473') + parser.add_argument("--split-name", type=str, default='crop_pic') + parser.add_argument("--num-classes", type=int, default=20) + parser.add_argument("--ignore-label", type=int, default=255) + parser.add_argument("--random-mirror", action="store_true") + parser.add_argument("--random-scale", action="store_true") + # Training Strategy + parser.add_argument("--learning-rate", type=float, default=7e-3) + parser.add_argument("--momentum", type=float, default=0.9) + parser.add_argument("--weight-decay", type=float, default=5e-4) + parser.add_argument("--gpu", type=str, default='0,1,2') + parser.add_argument("--start-epoch", type=int, default=0) + parser.add_argument("--epochs", type=int, default=150) + parser.add_argument("--eval-epochs", type=int, default=10) + parser.add_argument("--imagenet-pretrain", type=str, default='./pretrain_model/resnet101-imagenet.pth') + parser.add_argument("--log-dir", type=str, default='./log') + parser.add_argument("--model-restore", type=str, default='./log/checkpoint.pth.tar') + parser.add_argument("--schp-start", type=int, default=100, help='schp start epoch') + parser.add_argument("--cycle-epochs", type=int, default=10, help='schp cyclical epoch') + parser.add_argument("--schp-restore", type=str, default='./log/schp_checkpoint.pth.tar') + parser.add_argument("--lambda-s", type=float, default=1, help='segmentation loss weight') + parser.add_argument("--lambda-e", type=float, default=1, help='edge loss weight') + parser.add_argument("--lambda-c", type=float, default=0.1, help='segmentation-edge consistency loss weight') + return parser.parse_args() + + +def main(): + args = get_arguments() + print(args) + + start_epoch = 0 + cycle_n = 0 + + if not os.path.exists(args.log_dir): + os.makedirs(args.log_dir) + with open(os.path.join(args.log_dir, 'args.json'), 'w') as opt_file: + json.dump(vars(args), opt_file) + + gpus = [int(i) for i in args.gpu.split(',')] + if not args.gpu == 'None': + os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu + + input_size = list(map(int, args.input_size.split(','))) + + cudnn.enabled = True + cudnn.benchmark = True + + # Model Initialization + AugmentCE2P = networks.init_model(args.arch, num_classes=args.num_classes, pretrained=args.imagenet_pretrain) + model = DataParallelModel(AugmentCE2P) + model.cuda() + + IMAGE_MEAN = AugmentCE2P.mean + IMAGE_STD = AugmentCE2P.std + INPUT_SPACE = AugmentCE2P.input_space + print('image mean: {}'.format(IMAGE_MEAN)) + print('image 
std: {}'.format(IMAGE_STD)) + print('input space:{}'.format(INPUT_SPACE)) + + restore_from = args.model_restore + if os.path.exists(restore_from): + print('Resume training from {}'.format(restore_from)) + checkpoint = torch.load(restore_from) + model.load_state_dict(checkpoint['state_dict']) + start_epoch = checkpoint['epoch'] + + SCHP_AugmentCE2P = networks.init_model(args.arch, num_classes=args.num_classes, pretrained=args.imagenet_pretrain) + schp_model = DataParallelModel(SCHP_AugmentCE2P) + schp_model.cuda() + + if os.path.exists(args.schp_restore): + print('Resuming schp checkpoint from {}'.format(args.schp_restore)) + schp_checkpoint = torch.load(args.schp_restore) + schp_model_state_dict = schp_checkpoint['state_dict'] + cycle_n = schp_checkpoint['cycle_n'] + schp_model.load_state_dict(schp_model_state_dict) + + # Loss Function + criterion = CriterionAll(lambda_1=args.lambda_s, lambda_2=args.lambda_e, lambda_3=args.lambda_c, + num_classes=args.num_classes) + criterion = DataParallelCriterion(criterion) + criterion.cuda() + + # Data Loader + if INPUT_SPACE == 'BGR': + print('BGR Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + ]) + + elif INPUT_SPACE == 'RGB': + print('RGB Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + BGR2RGB_transform(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + ]) + + train_dataset = LIPDataSet(args.data_dir, args.split_name, crop_size=input_size, transform=transform) + train_loader = data.DataLoader(train_dataset, batch_size=args.batch_size * len(gpus), + num_workers=16, shuffle=True, pin_memory=True, drop_last=True) + print('Total training samples: {}'.format(len(train_dataset))) + + # Optimizer Initialization + optimizer = optim.SGD(model.parameters(), lr=args.learning_rate, momentum=args.momentum, + weight_decay=args.weight_decay) + + lr_scheduler = SGDRScheduler(optimizer, total_epoch=args.epochs, + eta_min=args.learning_rate / 100, warmup_epoch=10, + start_cyclical=args.schp_start, cyclical_base_lr=args.learning_rate / 2, + cyclical_epoch=args.cycle_epochs) + + total_iters = args.epochs * len(train_loader) + start = timeit.default_timer() + for epoch in range(start_epoch, args.epochs): + lr_scheduler.step(epoch=epoch) + lr = lr_scheduler.get_lr()[0] + + model.train() + for i_iter, batch in enumerate(train_loader): + i_iter += len(train_loader) * epoch + + images, labels, _ = batch + labels = labels.cuda(non_blocking=True) + + edges = generate_edge_tensor(labels) + labels = labels.type(torch.cuda.LongTensor) + edges = edges.type(torch.cuda.LongTensor) + + preds = model(images) + + # Online Self Correction Cycle with Label Refinement + if cycle_n >= 1: + with torch.no_grad(): + soft_preds = schp_model(images) + soft_parsing = [] + soft_edge = [] + for soft_pred in soft_preds: + soft_parsing.append(soft_pred[0][-1]) + soft_edge.append(soft_pred[1][-1]) + soft_preds = torch.cat(soft_parsing, dim=0) + soft_edges = torch.cat(soft_edge, dim=0) + else: + soft_preds = None + soft_edges = None + + loss = criterion(preds, [labels, edges, soft_preds, soft_edges], cycle_n) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + if i_iter % 100 == 0: + print('iter = {} of {} completed, lr = {}, loss = {}'.format(i_iter, total_iters, lr, + loss.data.cpu().numpy())) + if (epoch + 1) % (args.eval_epochs) == 0: + schp.save_checkpoint({ + 'epoch': epoch + 1, + 'state_dict': model.state_dict(), + }, False, args.log_dir, 
filename='checkpoint_{}.pth.tar'.format(epoch + 1)) + + # Self Correction Cycle with Model Aggregation + if (epoch + 1) >= args.schp_start and (epoch + 1 - args.schp_start) % args.cycle_epochs == 0: + print('Self-correction cycle number {}'.format(cycle_n)) + schp.moving_average(schp_model, model, 1.0 / (cycle_n + 1)) + cycle_n += 1 + schp.bn_re_estimate(train_loader, schp_model) + schp.save_schp_checkpoint({ + 'state_dict': schp_model.state_dict(), + 'cycle_n': cycle_n, + }, False, args.log_dir, filename='schp_{}_checkpoint.pth.tar'.format(cycle_n)) + + torch.cuda.empty_cache() + end = timeit.default_timer() + print('epoch = {} of {} completed using {} s'.format(epoch, args.epochs, + (end - start) / (epoch - start_epoch + 1))) + + end = timeit.default_timer() + print('Training Finished in {} seconds'.format(end - start)) + + +if __name__ == '__main__': + main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/make_id_list.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/make_id_list.py new file mode 100644 index 0000000000000000000000000000000000000000..311edf45e2d5a00ad85f3df96530e2f51bfd4686 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/global_local_parsing/make_id_list.py @@ -0,0 +1,13 @@ +import os + +DATASET = 'VIP' # DATASET: MHPv2 or CIHP or VIP +TYPE = 'crop_pic' # crop_pic or DemoDataset +IMG_DIR = '../demo/cropped_img/crop_pic' +SAVE_DIR = '../demo/cropped_img' + +if not os.path.exists(SAVE_DIR): + os.makedirs(SAVE_DIR) + +with open(os.path.join(SAVE_DIR, TYPE + '.txt'), "w") as f: + for img_name in os.listdir(IMG_DIR): + f.write(img_name[:-4] + '\n') diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/logits_fusion.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/logits_fusion.py new file mode 100644 index 0000000000000000000000000000000000000000..07a8446282d24b7811b56de5b9591da29ffcdd60 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/logits_fusion.py @@ -0,0 +1,307 @@ +import argparse +import cv2 +import os +import json +import numpy as np +from PIL import Image as PILImage +import joblib + + +def mask_nms(masks, bbox_scores, instances_confidence_threshold=0.5, overlap_threshold=0.7): + """ + NMS-like procedure used in Panoptic Segmentation + Remove the overlap areas of different instances in Instance Segmentation + """ + panoptic_seg = np.zeros(masks.shape[:2], dtype=np.uint8) + sorted_inds = list(range(len(bbox_scores))) + current_segment_id = 0 + segments_score = [] + + for inst_id in sorted_inds: + score = bbox_scores[inst_id] + if score < instances_confidence_threshold: + break + mask = masks[:, :, inst_id] + mask_area = mask.sum() + + if mask_area == 0: + continue + + intersect = (mask > 0) & (panoptic_seg > 0) + intersect_area = intersect.sum() + + if intersect_area * 1.0 / mask_area > overlap_threshold: + continue + + if intersect_area > 0: + mask = mask & (panoptic_seg == 0) + + current_segment_id += 1 + # panoptic_seg[np.where(mask==1)] = current_segment_id + # panoptic_seg = panoptic_seg + current_segment_id*mask + panoptic_seg = np.where(mask == 0, panoptic_seg, current_segment_id) + segments_score.append(score) + # print(np.unique(panoptic_seg)) + return panoptic_seg, segments_score + + +def extend(si, sj, instance_label, global_label, panoptic_seg_mask, class_map): + """ + """ + directions = [[-1, 0], [0, 1], [1, 0], [0, -1], + [1, 1], [1, -1], [-1, 1], [-1, -1]] + + inst_class = instance_label[si, sj] + human_class 
= panoptic_seg_mask[si, sj] + global_class = class_map[inst_class] + queue = [[si, sj]] + + while len(queue) != 0: + cur = queue[0] + queue.pop(0) + + for direction in directions: + ni = cur[0] + direction[0] + nj = cur[1] + direction[1] + + if ni >= 0 and nj >= 0 and \ + ni < instance_label.shape[0] and \ + nj < instance_label.shape[1] and \ + instance_label[ni, nj] == 0 and \ + global_label[ni, nj] == global_class: + instance_label[ni, nj] = inst_class + # Using refined instance label to refine human label + panoptic_seg_mask[ni, nj] = human_class + queue.append([ni, nj]) + + +def refine(instance_label, panoptic_seg_mask, global_label, class_map): + """ + Inputs: + [ instance_label ] + np.array() with shape [h, w] + [ global_label ] with shape [h, w] + np.array() + """ + for i in range(instance_label.shape[0]): + for j in range(instance_label.shape[1]): + if instance_label[i, j] != 0: + extend(i, j, instance_label, global_label, panoptic_seg_mask, class_map) + + +def get_palette(num_cls): + """ Returns the color map for visualizing the segmentation mask. + Inputs: + =num_cls= + Number of classes. + Returns: + The color map. + """ + n = num_cls + palette = [0] * (n * 3) + for j in range(0, n): + lab = j + palette[j * 3 + 0] = 0 + palette[j * 3 + 1] = 0 + palette[j * 3 + 2] = 0 + i = 0 + while lab: + palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) + palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) + palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) + i += 1 + lab >>= 3 + return palette + + +def patch2img_output(patch_dir, img_name, img_height, img_width, bbox, bbox_type, num_class): + """transform bbox patch outputs to image output""" + assert bbox_type == 'gt' or 'msrcnn' + output = np.zeros((img_height, img_width, num_class), dtype='float') + output[:, :, 0] = np.inf + count_predictions = np.zeros((img_height, img_width, num_class), dtype='int32') + for i in range(len(bbox)): # person index starts from 1 + file_path = os.path.join(patch_dir, os.path.splitext(img_name)[0] + '_' + str(i + 1) + '_' + bbox_type + '.npy') + bbox_output = np.load(file_path) + output[bbox[i][1]:bbox[i][3] + 1, bbox[i][0]:bbox[i][2] + 1, 1:] += bbox_output[:, :, 1:] + count_predictions[bbox[i][1]:bbox[i][3] + 1, bbox[i][0]:bbox[i][2] + 1, 1:] += 1 + output[bbox[i][1]:bbox[i][3] + 1, bbox[i][0]:bbox[i][2] + 1, 0] \ + = np.minimum(output[bbox[i][1]:bbox[i][3] + 1, bbox[i][0]:bbox[i][2] + 1, 0], bbox_output[:, :, 0]) + + # Caution zero dividing. + count_predictions[count_predictions == 0] = 1 + return output / count_predictions + + +def get_instance(cat_gt, panoptic_seg_mask): + """ + """ + instance_gt = np.zeros_like(cat_gt, dtype=np.uint8) + num_humans = len(np.unique(panoptic_seg_mask)) - 1 + class_map = {} + + total_part_num = 0 + for id in range(1, num_humans + 1): + human_part_label = np.where(panoptic_seg_mask == id, cat_gt, 0).astype(np.uint8) + # human_part_label = (np.where(panoptic_seg_mask==id) * cat_gt).astype(np.uint8) + part_classes = np.unique(human_part_label) + + exceed = False + for part_id in part_classes: + if part_id == 0: # background + continue + total_part_num += 1 + + if total_part_num > 255: + print("total_part_num exceed, return current instance map: {}".format(total_part_num)) + exceed = True + break + class_map[total_part_num] = part_id + instance_gt[np.where(human_part_label == part_id)] = total_part_num + if exceed: + break + + # Make instance id continous. 
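+    # Relabel so instance ids form a consecutive range 1..N, and remap each new id to its part class below.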
+ ori_cur_labels = np.unique(instance_gt) + total_num_label = len(ori_cur_labels) + if instance_gt.max() + 1 != total_num_label: + for label in range(1, total_num_label): + instance_gt[instance_gt == ori_cur_labels[label]] = label + + final_class_map = {} + for label in range(1, total_num_label): + if label >= 1: + final_class_map[label] = class_map[ori_cur_labels[label]] + + return instance_gt, final_class_map + + +def compute_confidence(im_name, feature_map, class_map, + instance_label, output_dir, + panoptic_seg_mask, seg_score_list): + """ + """ + conf_file = open(os.path.join(output_dir, os.path.splitext(im_name)[0] + '.txt'), 'w') + + weighted_map = np.zeros_like(feature_map[:, :, 0]) + for index, score in enumerate(seg_score_list): + weighted_map += (panoptic_seg_mask == index + 1) * score + + for label in class_map.keys(): + cls = class_map[label] + confidence = feature_map[:, :, cls].reshape(-1)[np.where(instance_label.reshape(-1) == label)] + confidence = (weighted_map * feature_map[:, :, cls].copy()).reshape(-1)[ + np.where(instance_label.reshape(-1) == label)] + + confidence = confidence.sum() / len(confidence) + conf_file.write('{} {}\n'.format(cls, confidence)) + + conf_file.close() + + +def result_saving(fused_output, img_name, img_height, img_width, output_dir, mask_output_path, bbox_score, msrcnn_bbox): + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + global_root = os.path.join(output_dir, 'global_parsing') + instance_root = os.path.join(output_dir, 'instance_parsing') + tag_dir = os.path.join(output_dir, 'global_tag') + + if not os.path.exists(global_root): + os.makedirs(global_root) + if not os.path.exists(instance_root): + os.makedirs(instance_root) + if not os.path.exists(tag_dir): + os.makedirs(tag_dir) + + # For visualizing indexed png image. 
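+    # Results are written as palette-mode PNGs: each pixel stores a class/instance id, and the palette maps ids to colors.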
+ palette = get_palette(256) + + fused_output = cv2.resize(fused_output, dsize=(img_width, img_height), interpolation=cv2.INTER_LINEAR) + seg_pred = np.asarray(np.argmax(fused_output, axis=2), dtype=np.uint8) + masks = np.load(mask_output_path) + masks[np.where(seg_pred == 0)] = 0 + + panoptic_seg_mask = masks + seg_score_list = bbox_score + + instance_pred, class_map = get_instance(seg_pred, panoptic_seg_mask) + refine(instance_pred, panoptic_seg_mask, seg_pred, class_map) + + compute_confidence(img_name, fused_output, class_map, instance_pred, instance_root, + panoptic_seg_mask, seg_score_list) + + ins_seg_results = open(os.path.join(tag_dir, os.path.splitext(img_name)[0] + '.txt'), "a") + keep_human_id_list = list(np.unique(panoptic_seg_mask)) + if 0 in keep_human_id_list: + keep_human_id_list.remove(0) + for i in keep_human_id_list: + ins_seg_results.write('{:.6f} {} {} {} {}\n'.format(seg_score_list[i - 1], + int(msrcnn_bbox[i - 1][1]), int(msrcnn_bbox[i - 1][0]), + int(msrcnn_bbox[i - 1][3]), int(msrcnn_bbox[i - 1][2]))) + ins_seg_results.close() + + output_im_global = PILImage.fromarray(seg_pred) + output_im_instance = PILImage.fromarray(instance_pred) + output_im_tag = PILImage.fromarray(panoptic_seg_mask) + output_im_global.putpalette(palette) + output_im_instance.putpalette(palette) + output_im_tag.putpalette(palette) + + output_im_global.save(os.path.join(global_root, os.path.splitext(img_name)[0] + '.png')) + output_im_instance.save(os.path.join(instance_root, os.path.splitext(img_name)[0] + '.png')) + output_im_tag.save(os.path.join(tag_dir, os.path.splitext(img_name)[0] + '.png')) + + +def multi_process(a, args): + img_name = a['im_name'] + img_height = a['img_height'] + img_width = a['img_width'] + msrcnn_bbox = a['person_bbox'] + bbox_score = a['person_bbox_score'] + + ######### loading outputs from gloabl and local models ######### + global_output = np.load(os.path.join(args.global_output_dir, os.path.splitext(img_name)[0] + '.npy')) + + msrcnn_output = patch2img_output(args.msrcnn_output_dir, img_name, img_height, img_width, msrcnn_bbox, + bbox_type='msrcnn', num_class=20) + + gt_output = patch2img_output(args.gt_output_dir, img_name, img_height, img_width, msrcnn_bbox, bbox_type='msrcnn', + num_class=20) + + #### global and local branch logits fusion ##### +# fused_output = global_output + msrcnn_output + gt_output + fused_output = global_output + gt_output + + + mask_output_path = os.path.join(args.mask_output_dir, os.path.splitext(img_name)[0] + '_mask.npy') + result_saving(fused_output, img_name, img_height, img_width, args.save_dir, mask_output_path, bbox_score, msrcnn_bbox) + return + + +def main(args): + json_file = open(args.test_json_path) + anno = json.load(json_file)['root'] + + results = joblib.Parallel(n_jobs=24, verbose=10, pre_dispatch="all")( + [joblib.delayed(multi_process)(a, args) for i, a in enumerate(anno)] + ) + + +def get_arguments(): + parser = argparse.ArgumentParser(description="obtain final prediction by logits fusion") + parser.add_argument("--test_json_path", type=str, default='./data/CIHP/cascade_152_finetune/test.json') + parser.add_argument("--global_output_dir", type=str, + default='./data/CIHP/global/global_result-cihp-resnet101/global_output') +# parser.add_argument("--msrcnn_output_dir", type=str, +# default='./data/CIHP/cascade_152__finetune/msrcnn_result-cihp-resnet101/msrcnn_output') + parser.add_argument("--gt_output_dir", type=str, + default='./data/CIHP/cascade_152__finetune/gt_result-cihp-resnet101/gt_output') + 
parser.add_argument("--mask_output_dir", type=str, default='./data/CIHP/cascade_152_finetune/mask') + parser.add_argument("--save_dir", type=str, default='./data/CIHP/fusion_results/cihp-msrcnn_finetune') + return parser.parse_args() + + +if __name__ == '__main__': + args = get_arguments() + main(args) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/make_crop_and_mask_w_mask_nms.py b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/make_crop_and_mask_w_mask_nms.py new file mode 100644 index 0000000000000000000000000000000000000000..1efc5ae86f81db7dcdae1d22db771d2a8e8d3ccf --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/make_crop_and_mask_w_mask_nms.py @@ -0,0 +1,134 @@ +import numpy as np +import cv2, torch +import os +import json +import argparse +import pycocotools.mask as mask_util +from tqdm import tqdm + + +def bbox_expand(img_height, img_width, bbox, exp_ratio): + x_min, y_min, x_max, y_max = bbox[:] + exp_x = (x_max - x_min) * ((exp_ratio - 1) / 2) + exp_y = (y_max - y_min) * ((exp_ratio - 1) / 2) + new_x_min = 0 if x_min - exp_x < 0 else np.round(x_min - exp_x) + new_y_min = 0 if y_min - exp_y < 0 else np.round(y_min - exp_y) + new_x_max = img_width - 1 if x_max + exp_x > img_width - 1 else np.round(x_max + exp_x) + new_y_max = img_height - 1 if y_max + exp_y > img_height - 1 else np.round(y_max + exp_y) + return int(new_x_min), int(new_y_min), int(new_x_max), int(new_y_max) + + +def make_crop_and_mask(img_info, pred, file_list, crop_save_dir, mask_save_dir, args): + img_name = img_info['file_name'] + img_id = img_info['id'] - 1 # img_info['id'] start form 1 + img_w = img_info['width'] + img_h = img_info['height'] + + img = cv2.imread(os.path.join(args.img_dir, img_name)) + + exp_bbox = [] + ori_bbox = [] + bbox_name_list = [] + bbox_score_list = [] + person_idx = 0 + + panoptic_seg = np.zeros((img_h, img_w), dtype=np.uint8) + assert len(pred[img_id]['instances']) > 0, 'image without instance prediction' + + for instance in pred[img_id]['instances']: + score = instance['score'] + if score < args.conf_thres: + break + + mask = mask_util.decode(instance['segmentation']) + mask_area = mask.sum() + + if mask_area == 0: # if mask_area < img_w*img_h/1000: + continue + + intersect = (mask > 0) & (panoptic_seg > 0) + intersect_area = intersect.sum() + + if intersect_area * 1.0 / mask_area > args.overlap_threshold: # todo add args + continue + + if intersect_area > 0: + mask = mask & (panoptic_seg == 0) + + person_idx += 1 + panoptic_seg = np.where(mask == 0, panoptic_seg, person_idx) + + bbox_score_list.append(score) + + ins_bbox = instance['bbox'] # [x,y,w,h] format + x_min, y_min, box_w, box_h = ins_bbox + x_max, y_max = x_min + box_w, y_min + box_h + exp_x_min, exp_y_min, exp_x_max, exp_y_max = bbox_expand(img_h, img_w, [x_min, y_min, x_max, y_max], + args.exp_ratio) + crop_img = img[exp_y_min:exp_y_max + 1, exp_x_min:exp_x_max + 1, :] + exp_bbox.append([exp_x_min, exp_y_min, exp_x_max, exp_y_max]) + ori_bbox.append([x_min, y_min, x_max, y_max]) + bbox_name = os.path.splitext(img_name)[0] + '_' + str(person_idx) + '_msrcnn.jpg' + bbox_name_list.append(bbox_name) + + cv2.imwrite(os.path.join(crop_save_dir, bbox_name), crop_img) + + assert person_idx > 0, 'image without instance' + mask_name = os.path.splitext(img_name)[0] + '_mask.npy' + np.save(os.path.join(mask_save_dir, mask_name), panoptic_seg) + + ############## json writing ################## + item = {} + item['dataset'] = 'CIHP' + item['im_name'] = img_name + item['img_height'] = 
img_h + item['img_width'] = img_w + item['center'] = [img_h / 2, img_w / 2] + item['person_num'] = person_idx + item['person_bbox'] = exp_bbox + item['real_person_bbox'] = ori_bbox + item['person_bbox_score'] = bbox_score_list + item['bbox_name'] = bbox_name_list + item['mask_name'] = mask_name + file_list.append(item) + json_file = {'root': file_list} + return json_file, file_list + + +def get_arguments(): + parser = argparse.ArgumentParser(description="crop person val/test demo for inference") + parser.add_argument("--exp_ratio", type=float, default=1.2) + parser.add_argument("--overlap_threshold", type=float, default=0.5) + parser.add_argument("--conf_thres", type=float, default=0.5) + parser.add_argument("--img_dir", type=str, + default='/data03/v_xuyunqiu/data/instance-level_human_parsing/Testing/Images') + parser.add_argument("--save_dir", type=str, + default='/data03/v_xuyunqiu/Projects/experiment_data/testing/resnest_200_TTA_mask_nms_all_data') + parser.add_argument("--img_list", type=str, + default='/data03/v_xuyunqiu/Projects/pycococreator/annotations/CIHP_test.json') + parser.add_argument("--det_res", type=str, + default='/data02/v_xuyunqiu/detectron2-ResNeSt/tools/output_cihp_inference_resnest/inference_TTA/instances_predictions.pth') + return parser.parse_args() + + +def main(args): + img_info_list = json.load(open(args.img_list, encoding='UTF-8')) + pred = torch.load(args.det_res) + + crop_save_dir = os.path.join(args.save_dir, 'crop_pic') + if not os.path.exists(crop_save_dir): + os.makedirs(crop_save_dir) + mask_save_dir = os.path.join(args.save_dir, 'crop_mask') + if not os.path.exists(mask_save_dir): + os.makedirs(mask_save_dir) + + file_list = [] + for img_info in tqdm(img_info_list['images']): + json_file, file_list = make_crop_and_mask(img_info, pred, file_list, crop_save_dir, mask_save_dir, args) + with open(os.path.join(args.save_dir, 'crop.json'), 'w') as f: + json.dump(json_file, f, indent=2) + + +if __name__ == '__main__': + args = get_arguments() + main(args) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/make_coco_style_annotation.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/make_coco_style_annotation.sh new file mode 100644 index 0000000000000000000000000000000000000000..37a1e7d4944c318bc275a58dceeaf987bb6517dc --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/make_coco_style_annotation.sh @@ -0,0 +1,14 @@ +python ./coco_style_annotation_creator/human_to_coco.py \ + --dataset 'CIHP' \ + --json_save_dir './data/CIHP/annotations' \ + --train_img_dir './data/CIHP/Training/Images' \ + --train_anno_dir './data/CIHP/Training/Human_ids' \ + --val_img_dir './data/CIHP/Validation/Images' \ + --val_anno_dir './data/CIHP/Validation/Human_ids' + + +python ./coco_style_annotation_creator/test_human2coco_format.py \ + --dataset 'CIHP' \ + --json_save_dir './data/CIHP/annotations' \ + --test_img_dir './data/CIHP/Testing/Images' + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/make_crop.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/make_crop.sh new file mode 100644 index 0000000000000000000000000000000000000000..604a433c0494b1ddba9223cd95bf6de2b4b150b0 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/make_crop.sh @@ -0,0 +1,6 @@ +python make_crop_and_mask_w_mask_nms.py \ + --img_dir './data/CIHP/Testing/Images' \ + --save_dir './data/CIHP/' \ + --img_list './data/CIHP/annotations/CIHP_val.json' \ + --det_res 
'./data/CIHP/detectron2_prediction/inference/instances_predictions.pth' + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/parsing_fusion.sh b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/parsing_fusion.sh new file mode 100644 index 0000000000000000000000000000000000000000..107bcf6b0532a7f807c76cd706e48aab767a5da3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/mhp_extension/scripts/parsing_fusion.sh @@ -0,0 +1,6 @@ +python logits_fusion.py \ +--test_json_path ./data/CIHP/crop.json \ +--global_output_dir ./data/CIHP/global_pic_parsing \ +--msrcnn_output_dir ./data/CIHP/crop_pic_parsing \ +--gt_output_dir ./data/CIHP/crop_pic_parsing \ +--save_dir ./data/CIHP/mhp_fusion_parsing diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8a098dee5911f3613d320d23db37bc401cf57fa4 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/__init__.py @@ -0,0 +1,5 @@ +from .bn import ABN, InPlaceABN, InPlaceABNSync +from .functions import ACT_RELU, ACT_LEAKY_RELU, ACT_ELU, ACT_NONE +from .misc import GlobalAvgPool2d, SingleGPU +from .residual import IdentityResidualBlock +from .dense import DenseModule diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/bn.py b/Self-Correction-Human-Parsing-for-ACGPN/modules/bn.py new file mode 100644 index 0000000000000000000000000000000000000000..a794698867e89140a030d550d832e6fa12561c8b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/bn.py @@ -0,0 +1,132 @@ +import torch +import torch.nn as nn +import torch.nn.functional as functional + +try: + from queue import Queue +except ImportError: + from Queue import Queue + +from .functions import * + + +class ABN(nn.Module): + """Activated Batch Normalization + + This gathers a `BatchNorm2d` and an activation function in a single module + """ + + def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, activation="leaky_relu", slope=0.01): + """Creates an Activated Batch Normalization module + + Parameters + ---------- + num_features : int + Number of feature channels in the input and output. + eps : float + Small constant to prevent numerical issues. + momentum : float + Momentum factor applied to compute running statistics as. + affine : bool + If `True` apply learned scale and shift transformation after normalization. + activation : str + Name of the activation functions, one of: `leaky_relu`, `elu` or `none`. + slope : float + Negative slope for the `leaky_relu` activation. 
+ """ + super(ABN, self).__init__() + self.num_features = num_features + self.affine = affine + self.eps = eps + self.momentum = momentum + self.activation = activation + self.slope = slope + if self.affine: + self.weight = nn.Parameter(torch.ones(num_features)) + self.bias = nn.Parameter(torch.zeros(num_features)) + else: + self.register_parameter('weight', None) + self.register_parameter('bias', None) + self.register_buffer('running_mean', torch.zeros(num_features)) + self.register_buffer('running_var', torch.ones(num_features)) + self.reset_parameters() + + def reset_parameters(self): + nn.init.constant_(self.running_mean, 0) + nn.init.constant_(self.running_var, 1) + if self.affine: + nn.init.constant_(self.weight, 1) + nn.init.constant_(self.bias, 0) + + def forward(self, x): + x = functional.batch_norm(x, self.running_mean, self.running_var, self.weight, self.bias, + self.training, self.momentum, self.eps) + + if self.activation == ACT_RELU: + return functional.relu(x, inplace=True) + elif self.activation == ACT_LEAKY_RELU: + return functional.leaky_relu(x, negative_slope=self.slope, inplace=True) + elif self.activation == ACT_ELU: + return functional.elu(x, inplace=True) + else: + return x + + def __repr__(self): + rep = '{name}({num_features}, eps={eps}, momentum={momentum},' \ + ' affine={affine}, activation={activation}' + if self.activation == "leaky_relu": + rep += ', slope={slope})' + else: + rep += ')' + return rep.format(name=self.__class__.__name__, **self.__dict__) + + +class InPlaceABN(ABN): + """InPlace Activated Batch Normalization""" + + def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, activation="leaky_relu", slope=0.01): + """Creates an InPlace Activated Batch Normalization module + + Parameters + ---------- + num_features : int + Number of feature channels in the input and output. + eps : float + Small constant to prevent numerical issues. + momentum : float + Momentum factor applied to compute running statistics as. + affine : bool + If `True` apply learned scale and shift transformation after normalization. + activation : str + Name of the activation functions, one of: `leaky_relu`, `elu` or `none`. + slope : float + Negative slope for the `leaky_relu` activation. + """ + super(InPlaceABN, self).__init__(num_features, eps, momentum, affine, activation, slope) + + def forward(self, x): + x, _, _ = inplace_abn(x, self.weight, self.bias, self.running_mean, self.running_var, + self.training, self.momentum, self.eps, self.activation, self.slope) + return x + + +class InPlaceABNSync(ABN): + """InPlace Activated Batch Normalization with cross-GPU synchronization + This assumes that it will be replicated across GPUs using the same mechanism as in `nn.DistributedDataParallel`. 
+ """ + + def forward(self, x): + x, _, _ = inplace_abn_sync(x, self.weight, self.bias, self.running_mean, self.running_var, + self.training, self.momentum, self.eps, self.activation, self.slope) + return x + + def __repr__(self): + rep = '{name}({num_features}, eps={eps}, momentum={momentum},' \ + ' affine={affine}, activation={activation}' + if self.activation == "leaky_relu": + rep += ', slope={slope})' + else: + rep += ')' + return rep.format(name=self.__class__.__name__, **self.__dict__) + + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/deeplab.py b/Self-Correction-Human-Parsing-for-ACGPN/modules/deeplab.py new file mode 100644 index 0000000000000000000000000000000000000000..fd25b78369b27ef02c183a0b17b9bf8354c5f7c3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/deeplab.py @@ -0,0 +1,84 @@ +import torch +import torch.nn as nn +import torch.nn.functional as functional + +from models._util import try_index +from .bn import ABN + + +class DeeplabV3(nn.Module): + def __init__(self, + in_channels, + out_channels, + hidden_channels=256, + dilations=(12, 24, 36), + norm_act=ABN, + pooling_size=None): + super(DeeplabV3, self).__init__() + self.pooling_size = pooling_size + + self.map_convs = nn.ModuleList([ + nn.Conv2d(in_channels, hidden_channels, 1, bias=False), + nn.Conv2d(in_channels, hidden_channels, 3, bias=False, dilation=dilations[0], padding=dilations[0]), + nn.Conv2d(in_channels, hidden_channels, 3, bias=False, dilation=dilations[1], padding=dilations[1]), + nn.Conv2d(in_channels, hidden_channels, 3, bias=False, dilation=dilations[2], padding=dilations[2]) + ]) + self.map_bn = norm_act(hidden_channels * 4) + + self.global_pooling_conv = nn.Conv2d(in_channels, hidden_channels, 1, bias=False) + self.global_pooling_bn = norm_act(hidden_channels) + + self.red_conv = nn.Conv2d(hidden_channels * 4, out_channels, 1, bias=False) + self.pool_red_conv = nn.Conv2d(hidden_channels, out_channels, 1, bias=False) + self.red_bn = norm_act(out_channels) + + self.reset_parameters(self.map_bn.activation, self.map_bn.slope) + + def reset_parameters(self, activation, slope): + gain = nn.init.calculate_gain(activation, slope) + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.xavier_normal_(m.weight.data, gain) + if hasattr(m, "bias") and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, ABN): + if hasattr(m, "weight") and m.weight is not None: + nn.init.constant_(m.weight, 1) + if hasattr(m, "bias") and m.bias is not None: + nn.init.constant_(m.bias, 0) + + def forward(self, x): + # Map convolutions + out = torch.cat([m(x) for m in self.map_convs], dim=1) + out = self.map_bn(out) + out = self.red_conv(out) + + # Global pooling + pool = self._global_pooling(x) + pool = self.global_pooling_conv(pool) + pool = self.global_pooling_bn(pool) + pool = self.pool_red_conv(pool) + if self.training or self.pooling_size is None: + pool = pool.repeat(1, 1, x.size(2), x.size(3)) + + out += pool + out = self.red_bn(out) + return out + + def _global_pooling(self, x): + if self.training or self.pooling_size is None: + pool = x.view(x.size(0), x.size(1), -1).mean(dim=-1) + pool = pool.view(x.size(0), x.size(1), 1, 1) + else: + pooling_size = (min(try_index(self.pooling_size, 0), x.shape[2]), + min(try_index(self.pooling_size, 1), x.shape[3])) + padding = ( + (pooling_size[1] - 1) // 2, + (pooling_size[1] - 1) // 2 if pooling_size[1] % 2 == 1 else (pooling_size[1] - 1) // 2 + 1, + (pooling_size[0] - 1) // 2, + (pooling_size[0] - 1) // 2 if 
pooling_size[0] % 2 == 1 else (pooling_size[0] - 1) // 2 + 1 + ) + + pool = functional.avg_pool2d(x, pooling_size, stride=1) + pool = functional.pad(pool, pad=padding, mode="replicate") + return pool diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/dense.py b/Self-Correction-Human-Parsing-for-ACGPN/modules/dense.py new file mode 100644 index 0000000000000000000000000000000000000000..9638d6e86d2ae838550fefa9002a984af52e6cc8 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/dense.py @@ -0,0 +1,42 @@ +from collections import OrderedDict + +import torch +import torch.nn as nn + +from .bn import ABN + + +class DenseModule(nn.Module): + def __init__(self, in_channels, growth, layers, bottleneck_factor=4, norm_act=ABN, dilation=1): + super(DenseModule, self).__init__() + self.in_channels = in_channels + self.growth = growth + self.layers = layers + + self.convs1 = nn.ModuleList() + self.convs3 = nn.ModuleList() + for i in range(self.layers): + self.convs1.append(nn.Sequential(OrderedDict([ + ("bn", norm_act(in_channels)), + ("conv", nn.Conv2d(in_channels, self.growth * bottleneck_factor, 1, bias=False)) + ]))) + self.convs3.append(nn.Sequential(OrderedDict([ + ("bn", norm_act(self.growth * bottleneck_factor)), + ("conv", nn.Conv2d(self.growth * bottleneck_factor, self.growth, 3, padding=dilation, bias=False, + dilation=dilation)) + ]))) + in_channels += self.growth + + @property + def out_channels(self): + return self.in_channels + self.growth * self.layers + + def forward(self, x): + inputs = [x] + for i in range(self.layers): + x = torch.cat(inputs, dim=1) + x = self.convs1[i](x) + x = self.convs3[i](x) + inputs += [x] + + return torch.cat(inputs, dim=1) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/functions.py b/Self-Correction-Human-Parsing-for-ACGPN/modules/functions.py new file mode 100644 index 0000000000000000000000000000000000000000..aea9729c0e6944c07bbd63368956e63ab4c76c86 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/functions.py @@ -0,0 +1,244 @@ +from os import path +import torch +import torch.distributed as dist +import torch.autograd as autograd +import torch.cuda.comm as comm +from torch.autograd.function import once_differentiable +from torch.utils.cpp_extension import load + +_src_path = path.join(path.dirname(path.abspath(__file__)), "src") +_backend = load(name="inplace_abn", + extra_cflags=["-O3"], + sources=[path.join(_src_path, f) for f in [ + "inplace_abn.cpp", + "inplace_abn_cpu.cpp", + "inplace_abn_cuda.cu", + "inplace_abn_cuda_half.cu" + ]], + extra_cuda_cflags=["--expt-extended-lambda"]) + +# Activation names +ACT_RELU = "relu" +ACT_LEAKY_RELU = "leaky_relu" +ACT_ELU = "elu" +ACT_NONE = "none" + + +def _check(fn, *args, **kwargs): + success = fn(*args, **kwargs) + if not success: + raise RuntimeError("CUDA Error encountered in {}".format(fn)) + + +def _broadcast_shape(x): + out_size = [] + for i, s in enumerate(x.size()): + if i != 1: + out_size.append(1) + else: + out_size.append(s) + return out_size + + +def _reduce(x): + if len(x.size()) == 2: + return x.sum(dim=0) + else: + n, c = x.size()[0:2] + return x.contiguous().view((n, c, -1)).sum(2).sum(0) + + +def _count_samples(x): + count = 1 + for i, s in enumerate(x.size()): + if i != 1: + count *= s + return count + + +def _act_forward(ctx, x): + if ctx.activation == ACT_LEAKY_RELU: + _backend.leaky_relu_forward(x, ctx.slope) + elif ctx.activation == ACT_ELU: + _backend.elu_forward(x) + elif ctx.activation == ACT_NONE: + pass + + +def 
_act_backward(ctx, x, dx): + if ctx.activation == ACT_LEAKY_RELU: + _backend.leaky_relu_backward(x, dx, ctx.slope) + elif ctx.activation == ACT_ELU: + _backend.elu_backward(x, dx) + elif ctx.activation == ACT_NONE: + pass + + +class InPlaceABN(autograd.Function): + @staticmethod + def forward(ctx, x, weight, bias, running_mean, running_var, + training=True, momentum=0.1, eps=1e-05, activation=ACT_LEAKY_RELU, slope=0.01): + # Save context + ctx.training = training + ctx.momentum = momentum + ctx.eps = eps + ctx.activation = activation + ctx.slope = slope + ctx.affine = weight is not None and bias is not None + + # Prepare inputs + count = _count_samples(x) + x = x.contiguous() + weight = weight.contiguous() if ctx.affine else x.new_empty(0) + bias = bias.contiguous() if ctx.affine else x.new_empty(0) + + if ctx.training: + mean, var = _backend.mean_var(x) + + # Update running stats + running_mean.mul_((1 - ctx.momentum)).add_(ctx.momentum * mean) + running_var.mul_((1 - ctx.momentum)).add_(ctx.momentum * var * count / (count - 1)) + + # Mark in-place modified tensors + ctx.mark_dirty(x, running_mean, running_var) + else: + mean, var = running_mean.contiguous(), running_var.contiguous() + ctx.mark_dirty(x) + + # BN forward + activation + _backend.forward(x, mean, var, weight, bias, ctx.affine, ctx.eps) + _act_forward(ctx, x) + + # Output + ctx.var = var + ctx.save_for_backward(x, var, weight, bias) + ctx.mark_non_differentiable(running_mean, running_var) + return x, running_mean, running_var + + @staticmethod + @once_differentiable + def backward(ctx, dz, _drunning_mean, _drunning_var): + z, var, weight, bias = ctx.saved_tensors + dz = dz.contiguous() + + # Undo activation + _act_backward(ctx, z, dz) + + if ctx.training: + edz, eydz = _backend.edz_eydz(z, dz, weight, bias, ctx.affine, ctx.eps) + else: + # TODO: implement simplified CUDA backward for inference mode + edz = dz.new_zeros(dz.size(1)) + eydz = dz.new_zeros(dz.size(1)) + + dx = _backend.backward(z, dz, var, weight, bias, edz, eydz, ctx.affine, ctx.eps) + # dweight = eydz * weight.sign() if ctx.affine else None + dweight = eydz if ctx.affine else None + if dweight is not None: + dweight[weight < 0] *= -1 + dbias = edz if ctx.affine else None + + return dx, dweight, dbias, None, None, None, None, None, None, None + + +class InPlaceABNSync(autograd.Function): + @classmethod + def forward(cls, ctx, x, weight, bias, running_mean, running_var, + training=True, momentum=0.1, eps=1e-05, activation=ACT_LEAKY_RELU, slope=0.01, equal_batches=True): + # Save context + ctx.training = training + ctx.momentum = momentum + ctx.eps = eps + ctx.activation = activation + ctx.slope = slope + ctx.affine = weight is not None and bias is not None + + # Prepare inputs + ctx.world_size = dist.get_world_size() if dist.is_initialized() else 1 + + # count = _count_samples(x) + batch_size = x.new_tensor([x.shape[0]], dtype=torch.long) + + x = x.contiguous() + weight = weight.contiguous() if ctx.affine else x.new_empty(0) + bias = bias.contiguous() if ctx.affine else x.new_empty(0) + + if ctx.training: + mean, var = _backend.mean_var(x) + if ctx.world_size > 1: + # get global batch size + if equal_batches: + batch_size *= ctx.world_size + else: + dist.all_reduce(batch_size, dist.ReduceOp.SUM) + + ctx.factor = x.shape[0] / float(batch_size.item()) + + mean_all = mean.clone() * ctx.factor + dist.all_reduce(mean_all, dist.ReduceOp.SUM) + + var_all = (var + (mean - mean_all) ** 2) * ctx.factor + dist.all_reduce(var_all, dist.ReduceOp.SUM) + + mean = mean_all + var 
= var_all + + # Update running stats + running_mean.mul_((1 - ctx.momentum)).add_(ctx.momentum * mean) + count = batch_size.item() * x.view(x.shape[0], x.shape[1], -1).shape[-1] + running_var.mul_((1 - ctx.momentum)).add_(ctx.momentum * var * (float(count) / (count - 1))) + + # Mark in-place modified tensors + ctx.mark_dirty(x, running_mean, running_var) + else: + mean, var = running_mean.contiguous(), running_var.contiguous() + ctx.mark_dirty(x) + + # BN forward + activation + _backend.forward(x, mean, var, weight, bias, ctx.affine, ctx.eps) + _act_forward(ctx, x) + + # Output + ctx.var = var + ctx.save_for_backward(x, var, weight, bias) + ctx.mark_non_differentiable(running_mean, running_var) + return x, running_mean, running_var + + @staticmethod + @once_differentiable + def backward(ctx, dz, _drunning_mean, _drunning_var): + z, var, weight, bias = ctx.saved_tensors + dz = dz.contiguous() + + # Undo activation + _act_backward(ctx, z, dz) + + if ctx.training: + edz, eydz = _backend.edz_eydz(z, dz, weight, bias, ctx.affine, ctx.eps) + edz_local = edz.clone() + eydz_local = eydz.clone() + + if ctx.world_size > 1: + edz *= ctx.factor + dist.all_reduce(edz, dist.ReduceOp.SUM) + + eydz *= ctx.factor + dist.all_reduce(eydz, dist.ReduceOp.SUM) + else: + edz_local = edz = dz.new_zeros(dz.size(1)) + eydz_local = eydz = dz.new_zeros(dz.size(1)) + + dx = _backend.backward(z, dz, var, weight, bias, edz, eydz, ctx.affine, ctx.eps) + # dweight = eydz_local * weight.sign() if ctx.affine else None + dweight = eydz_local if ctx.affine else None + if dweight is not None: + dweight[weight < 0] *= -1 + dbias = edz_local if ctx.affine else None + + return dx, dweight, dbias, None, None, None, None, None, None, None + + +inplace_abn = InPlaceABN.apply +inplace_abn_sync = InPlaceABNSync.apply + +__all__ = ["inplace_abn", "inplace_abn_sync", "ACT_RELU", "ACT_LEAKY_RELU", "ACT_ELU", "ACT_NONE"] diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/misc.py b/Self-Correction-Human-Parsing-for-ACGPN/modules/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..3c50b69b38c950801baacba8b3684ffd23aef08b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/misc.py @@ -0,0 +1,21 @@ +import torch.nn as nn +import torch +import torch.distributed as dist + +class GlobalAvgPool2d(nn.Module): + def __init__(self): + """Global average pooling over the input's spatial dimensions""" + super(GlobalAvgPool2d, self).__init__() + + def forward(self, inputs): + in_size = inputs.size() + return inputs.view((in_size[0], in_size[1], -1)).mean(dim=2) + +class SingleGPU(nn.Module): + def __init__(self, module): + super(SingleGPU, self).__init__() + self.module=module + + def forward(self, input): + return self.module(input.cuda(non_blocking=True)) + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/residual.py b/Self-Correction-Human-Parsing-for-ACGPN/modules/residual.py new file mode 100644 index 0000000000000000000000000000000000000000..8a5c90e0606a451ff690f67a2feac28476241d86 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/residual.py @@ -0,0 +1,182 @@ +from collections import OrderedDict + +import torch.nn as nn + +from .bn import ABN, ACT_LEAKY_RELU, ACT_ELU, ACT_NONE +import torch.nn.functional as functional + + +class ResidualBlock(nn.Module): + """Configurable residual block + + Parameters + ---------- + in_channels : int + Number of input channels. + channels : list of int + Number of channels in the internal feature maps. 
Can either have two or three elements: if three construct + a residual block with two `3 x 3` convolutions, otherwise construct a bottleneck block with `1 x 1`, then + `3 x 3` then `1 x 1` convolutions. + stride : int + Stride of the first `3 x 3` convolution + dilation : int + Dilation to apply to the `3 x 3` convolutions. + groups : int + Number of convolution groups. This is used to create ResNeXt-style blocks and is only compatible with + bottleneck blocks. + norm_act : callable + Function to create normalization / activation Module. + dropout: callable + Function to create Dropout Module. + """ + + def __init__(self, + in_channels, + channels, + stride=1, + dilation=1, + groups=1, + norm_act=ABN, + dropout=None): + super(ResidualBlock, self).__init__() + + # Check parameters for inconsistencies + if len(channels) != 2 and len(channels) != 3: + raise ValueError("channels must contain either two or three values") + if len(channels) == 2 and groups != 1: + raise ValueError("groups > 1 are only valid if len(channels) == 3") + + is_bottleneck = len(channels) == 3 + need_proj_conv = stride != 1 or in_channels != channels[-1] + + if not is_bottleneck: + bn2 = norm_act(channels[1]) + bn2.activation = ACT_NONE + layers = [ + ("conv1", nn.Conv2d(in_channels, channels[0], 3, stride=stride, padding=dilation, bias=False, + dilation=dilation)), + ("bn1", norm_act(channels[0])), + ("conv2", nn.Conv2d(channels[0], channels[1], 3, stride=1, padding=dilation, bias=False, + dilation=dilation)), + ("bn2", bn2) + ] + if dropout is not None: + layers = layers[0:2] + [("dropout", dropout())] + layers[2:] + else: + bn3 = norm_act(channels[2]) + bn3.activation = ACT_NONE + layers = [ + ("conv1", nn.Conv2d(in_channels, channels[0], 1, stride=1, padding=0, bias=False)), + ("bn1", norm_act(channels[0])), + ("conv2", nn.Conv2d(channels[0], channels[1], 3, stride=stride, padding=dilation, bias=False, + groups=groups, dilation=dilation)), + ("bn2", norm_act(channels[1])), + ("conv3", nn.Conv2d(channels[1], channels[2], 1, stride=1, padding=0, bias=False)), + ("bn3", bn3) + ] + if dropout is not None: + layers = layers[0:4] + [("dropout", dropout())] + layers[4:] + self.convs = nn.Sequential(OrderedDict(layers)) + + if need_proj_conv: + self.proj_conv = nn.Conv2d(in_channels, channels[-1], 1, stride=stride, padding=0, bias=False) + self.proj_bn = norm_act(channels[-1]) + self.proj_bn.activation = ACT_NONE + + def forward(self, x): + if hasattr(self, "proj_conv"): + residual = self.proj_conv(x) + residual = self.proj_bn(residual) + else: + residual = x + x = self.convs(x) + residual + + if self.convs.bn1.activation == ACT_LEAKY_RELU: + return functional.leaky_relu(x, negative_slope=self.convs.bn1.slope, inplace=True) + elif self.convs.bn1.activation == ACT_ELU: + return functional.elu(x, inplace=True) + else: + return x + + +class IdentityResidualBlock(nn.Module): + def __init__(self, + in_channels, + channels, + stride=1, + dilation=1, + groups=1, + norm_act=ABN, + dropout=None): + """Configurable identity-mapping residual block + + Parameters + ---------- + in_channels : int + Number of input channels. + channels : list of int + Number of channels in the internal feature maps. Can either have two or three elements: if three construct + a residual block with two `3 x 3` convolutions, otherwise construct a bottleneck block with `1 x 1`, then + `3 x 3` then `1 x 1` convolutions. + stride : int + Stride of the first `3 x 3` convolution + dilation : int + Dilation to apply to the `3 x 3` convolutions. 
+ groups : int + Number of convolution groups. This is used to create ResNeXt-style blocks and is only compatible with + bottleneck blocks. + norm_act : callable + Function to create normalization / activation Module. + dropout: callable + Function to create Dropout Module. + """ + super(IdentityResidualBlock, self).__init__() + + # Check parameters for inconsistencies + if len(channels) != 2 and len(channels) != 3: + raise ValueError("channels must contain either two or three values") + if len(channels) == 2 and groups != 1: + raise ValueError("groups > 1 are only valid if len(channels) == 3") + + is_bottleneck = len(channels) == 3 + need_proj_conv = stride != 1 or in_channels != channels[-1] + + self.bn1 = norm_act(in_channels) + if not is_bottleneck: + layers = [ + ("conv1", nn.Conv2d(in_channels, channels[0], 3, stride=stride, padding=dilation, bias=False, + dilation=dilation)), + ("bn2", norm_act(channels[0])), + ("conv2", nn.Conv2d(channels[0], channels[1], 3, stride=1, padding=dilation, bias=False, + dilation=dilation)) + ] + if dropout is not None: + layers = layers[0:2] + [("dropout", dropout())] + layers[2:] + else: + layers = [ + ("conv1", nn.Conv2d(in_channels, channels[0], 1, stride=stride, padding=0, bias=False)), + ("bn2", norm_act(channels[0])), + ("conv2", nn.Conv2d(channels[0], channels[1], 3, stride=1, padding=dilation, bias=False, + groups=groups, dilation=dilation)), + ("bn3", norm_act(channels[1])), + ("conv3", nn.Conv2d(channels[1], channels[2], 1, stride=1, padding=0, bias=False)) + ] + if dropout is not None: + layers = layers[0:4] + [("dropout", dropout())] + layers[4:] + self.convs = nn.Sequential(OrderedDict(layers)) + + if need_proj_conv: + self.proj_conv = nn.Conv2d(in_channels, channels[-1], 1, stride=stride, padding=0, bias=False) + + def forward(self, x): + if hasattr(self, "proj_conv"): + bn1 = self.bn1(x) + shortcut = self.proj_conv(bn1) + else: + shortcut = x.clone() + bn1 = self.bn1(x) + + out = self.convs(bn1) + out.add_(shortcut) + + return out diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/checks.h b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/checks.h new file mode 100644 index 0000000000000000000000000000000000000000..e761a6fe34d0789815b588eba7e3726026e0e868 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/checks.h @@ -0,0 +1,15 @@ +#pragma once + +#include + +// Define AT_CHECK for old version of ATen where the same function was called AT_ASSERT +#ifndef AT_CHECK +#define AT_CHECK AT_ASSERT +#endif + +#define CHECK_CUDA(x) AT_CHECK((x).type().is_cuda(), #x " must be a CUDA tensor") +#define CHECK_CPU(x) AT_CHECK(!(x).type().is_cuda(), #x " must be a CPU tensor") +#define CHECK_CONTIGUOUS(x) AT_CHECK((x).is_contiguous(), #x " must be contiguous") + +#define CHECK_CUDA_INPUT(x) CHECK_CUDA(x); CHECK_CONTIGUOUS(x) +#define CHECK_CPU_INPUT(x) CHECK_CPU(x); CHECK_CONTIGUOUS(x) \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn.cpp b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0a6b1128cc20cbfc476134154e23e5869a92b856 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn.cpp @@ -0,0 +1,95 @@ +#include + +#include + +#include "inplace_abn.h" + +std::vector mean_var(at::Tensor x) { + if (x.is_cuda()) { + if (x.type().scalarType() == at::ScalarType::Half) { + return mean_var_cuda_h(x); + } else { + return mean_var_cuda(x); + } + } else 
{ + return mean_var_cpu(x); + } +} + +at::Tensor forward(at::Tensor x, at::Tensor mean, at::Tensor var, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + if (x.is_cuda()) { + if (x.type().scalarType() == at::ScalarType::Half) { + return forward_cuda_h(x, mean, var, weight, bias, affine, eps); + } else { + return forward_cuda(x, mean, var, weight, bias, affine, eps); + } + } else { + return forward_cpu(x, mean, var, weight, bias, affine, eps); + } +} + +std::vector edz_eydz(at::Tensor z, at::Tensor dz, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + if (z.is_cuda()) { + if (z.type().scalarType() == at::ScalarType::Half) { + return edz_eydz_cuda_h(z, dz, weight, bias, affine, eps); + } else { + return edz_eydz_cuda(z, dz, weight, bias, affine, eps); + } + } else { + return edz_eydz_cpu(z, dz, weight, bias, affine, eps); + } +} + +at::Tensor backward(at::Tensor z, at::Tensor dz, at::Tensor var, at::Tensor weight, at::Tensor bias, + at::Tensor edz, at::Tensor eydz, bool affine, float eps) { + if (z.is_cuda()) { + if (z.type().scalarType() == at::ScalarType::Half) { + return backward_cuda_h(z, dz, var, weight, bias, edz, eydz, affine, eps); + } else { + return backward_cuda(z, dz, var, weight, bias, edz, eydz, affine, eps); + } + } else { + return backward_cpu(z, dz, var, weight, bias, edz, eydz, affine, eps); + } +} + +void leaky_relu_forward(at::Tensor z, float slope) { + at::leaky_relu_(z, slope); +} + +void leaky_relu_backward(at::Tensor z, at::Tensor dz, float slope) { + if (z.is_cuda()) { + if (z.type().scalarType() == at::ScalarType::Half) { + return leaky_relu_backward_cuda_h(z, dz, slope); + } else { + return leaky_relu_backward_cuda(z, dz, slope); + } + } else { + return leaky_relu_backward_cpu(z, dz, slope); + } +} + +void elu_forward(at::Tensor z) { + at::elu_(z); +} + +void elu_backward(at::Tensor z, at::Tensor dz) { + if (z.is_cuda()) { + return elu_backward_cuda(z, dz); + } else { + return elu_backward_cpu(z, dz); + } +} + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("mean_var", &mean_var, "Mean and variance computation"); + m.def("forward", &forward, "In-place forward computation"); + m.def("edz_eydz", &edz_eydz, "First part of backward computation"); + m.def("backward", &backward, "Second part of backward computation"); + m.def("leaky_relu_forward", &leaky_relu_forward, "Leaky relu forward computation"); + m.def("leaky_relu_backward", &leaky_relu_backward, "Leaky relu backward computation and inversion"); + m.def("elu_forward", &elu_forward, "Elu forward computation"); + m.def("elu_backward", &elu_backward, "Elu backward computation and inversion"); +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn.h b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn.h new file mode 100644 index 0000000000000000000000000000000000000000..17afd1196449ecb6376f28961e54b55e1537492f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn.h @@ -0,0 +1,88 @@ +#pragma once + +#include + +#include + +std::vector mean_var_cpu(at::Tensor x); +std::vector mean_var_cuda(at::Tensor x); +std::vector mean_var_cuda_h(at::Tensor x); + +at::Tensor forward_cpu(at::Tensor x, at::Tensor mean, at::Tensor var, at::Tensor weight, at::Tensor bias, + bool affine, float eps); +at::Tensor forward_cuda(at::Tensor x, at::Tensor mean, at::Tensor var, at::Tensor weight, at::Tensor bias, + bool affine, float eps); +at::Tensor forward_cuda_h(at::Tensor x, at::Tensor mean, at::Tensor var, at::Tensor weight, at::Tensor 
bias, + bool affine, float eps); + +std::vector edz_eydz_cpu(at::Tensor z, at::Tensor dz, at::Tensor weight, at::Tensor bias, + bool affine, float eps); +std::vector edz_eydz_cuda(at::Tensor z, at::Tensor dz, at::Tensor weight, at::Tensor bias, + bool affine, float eps); +std::vector edz_eydz_cuda_h(at::Tensor z, at::Tensor dz, at::Tensor weight, at::Tensor bias, + bool affine, float eps); + +at::Tensor backward_cpu(at::Tensor z, at::Tensor dz, at::Tensor var, at::Tensor weight, at::Tensor bias, + at::Tensor edz, at::Tensor eydz, bool affine, float eps); +at::Tensor backward_cuda(at::Tensor z, at::Tensor dz, at::Tensor var, at::Tensor weight, at::Tensor bias, + at::Tensor edz, at::Tensor eydz, bool affine, float eps); +at::Tensor backward_cuda_h(at::Tensor z, at::Tensor dz, at::Tensor var, at::Tensor weight, at::Tensor bias, + at::Tensor edz, at::Tensor eydz, bool affine, float eps); + +void leaky_relu_backward_cpu(at::Tensor z, at::Tensor dz, float slope); +void leaky_relu_backward_cuda(at::Tensor z, at::Tensor dz, float slope); +void leaky_relu_backward_cuda_h(at::Tensor z, at::Tensor dz, float slope); + +void elu_backward_cpu(at::Tensor z, at::Tensor dz); +void elu_backward_cuda(at::Tensor z, at::Tensor dz); + +static void get_dims(at::Tensor x, int64_t& num, int64_t& chn, int64_t& sp) { + num = x.size(0); + chn = x.size(1); + sp = 1; + for (int64_t i = 2; i < x.ndimension(); ++i) + sp *= x.size(i); +} + +/* + * Specialized CUDA reduction functions for BN + */ +#ifdef __CUDACC__ + +#include "utils/cuda.cuh" + +template +__device__ T reduce(Op op, int plane, int N, int S) { + T sum = (T)0; + for (int batch = 0; batch < N; ++batch) { + for (int x = threadIdx.x; x < S; x += blockDim.x) { + sum += op(batch, plane, x); + } + } + + // sum over NumThreads within a warp + sum = warpSum(sum); + + // 'transpose', and reduce within warp again + __shared__ T shared[32]; + __syncthreads(); + if (threadIdx.x % WARP_SIZE == 0) { + shared[threadIdx.x / WARP_SIZE] = sum; + } + if (threadIdx.x >= blockDim.x / WARP_SIZE && threadIdx.x < WARP_SIZE) { + // zero out the other entries in shared + shared[threadIdx.x] = (T)0; + } + __syncthreads(); + if (threadIdx.x / WARP_SIZE == 0) { + sum = warpSum(shared[threadIdx.x]); + if (threadIdx.x == 0) { + shared[0] = sum; + } + } + __syncthreads(); + + // Everyone picks it up, should be broadcast into the whole gradInput + return shared[0]; +} +#endif diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cpu.cpp b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cpu.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ffc6d38c52ea31661b8dd438dc3fe1958f50b61e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cpu.cpp @@ -0,0 +1,119 @@ +#include + +#include + +#include "utils/checks.h" +#include "inplace_abn.h" + +at::Tensor reduce_sum(at::Tensor x) { + if (x.ndimension() == 2) { + return x.sum(0); + } else { + auto x_view = x.view({x.size(0), x.size(1), -1}); + return x_view.sum(-1).sum(0); + } +} + +at::Tensor broadcast_to(at::Tensor v, at::Tensor x) { + if (x.ndimension() == 2) { + return v; + } else { + std::vector broadcast_size = {1, -1}; + for (int64_t i = 2; i < x.ndimension(); ++i) + broadcast_size.push_back(1); + + return v.view(broadcast_size); + } +} + +int64_t count(at::Tensor x) { + int64_t count = x.size(0); + for (int64_t i = 2; i < x.ndimension(); ++i) + count *= x.size(i); + + return count; +} + +at::Tensor invert_affine(at::Tensor z, at::Tensor weight, 
at::Tensor bias, bool affine, float eps) { + if (affine) { + return (z - broadcast_to(bias, z)) / broadcast_to(at::abs(weight) + eps, z); + } else { + return z; + } +} + +std::vector mean_var_cpu(at::Tensor x) { + auto num = count(x); + auto mean = reduce_sum(x) / num; + auto diff = x - broadcast_to(mean, x); + auto var = reduce_sum(diff.pow(2)) / num; + + return {mean, var}; +} + +at::Tensor forward_cpu(at::Tensor x, at::Tensor mean, at::Tensor var, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + auto gamma = affine ? at::abs(weight) + eps : at::ones_like(var); + auto mul = at::rsqrt(var + eps) * gamma; + + x.sub_(broadcast_to(mean, x)); + x.mul_(broadcast_to(mul, x)); + if (affine) x.add_(broadcast_to(bias, x)); + + return x; +} + +std::vector edz_eydz_cpu(at::Tensor z, at::Tensor dz, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + auto edz = reduce_sum(dz); + auto y = invert_affine(z, weight, bias, affine, eps); + auto eydz = reduce_sum(y * dz); + + return {edz, eydz}; +} + +at::Tensor backward_cpu(at::Tensor z, at::Tensor dz, at::Tensor var, at::Tensor weight, at::Tensor bias, + at::Tensor edz, at::Tensor eydz, bool affine, float eps) { + auto y = invert_affine(z, weight, bias, affine, eps); + auto mul = affine ? at::rsqrt(var + eps) * (at::abs(weight) + eps) : at::rsqrt(var + eps); + + auto num = count(z); + auto dx = (dz - broadcast_to(edz / num, dz) - y * broadcast_to(eydz / num, dz)) * broadcast_to(mul, dz); + return dx; +} + +void leaky_relu_backward_cpu(at::Tensor z, at::Tensor dz, float slope) { + CHECK_CPU_INPUT(z); + CHECK_CPU_INPUT(dz); + + AT_DISPATCH_FLOATING_TYPES(z.type(), "leaky_relu_backward_cpu", ([&] { + int64_t count = z.numel(); + auto *_z = z.data(); + auto *_dz = dz.data(); + + for (int64_t i = 0; i < count; ++i) { + if (_z[i] < 0) { + _z[i] *= 1 / slope; + _dz[i] *= slope; + } + } + })); +} + +void elu_backward_cpu(at::Tensor z, at::Tensor dz) { + CHECK_CPU_INPUT(z); + CHECK_CPU_INPUT(dz); + + AT_DISPATCH_FLOATING_TYPES(z.type(), "elu_backward_cpu", ([&] { + int64_t count = z.numel(); + auto *_z = z.data(); + auto *_dz = dz.data(); + + for (int64_t i = 0; i < count; ++i) { + if (_z[i] < 0) { + _z[i] = log1p(_z[i]); + _dz[i] *= (_z[i] + 1.f); + } + } + })); +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cuda.cu b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cuda.cu new file mode 100644 index 0000000000000000000000000000000000000000..b157b06d47173d1645c6a40c89f564b737e84d43 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cuda.cu @@ -0,0 +1,333 @@ +#include + +#include +#include + +#include + +#include "utils/checks.h" +#include "utils/cuda.cuh" +#include "inplace_abn.h" + +#include + +// Operations for reduce +template +struct SumOp { + __device__ SumOp(const T *t, int c, int s) + : tensor(t), chn(c), sp(s) {} + __device__ __forceinline__ T operator()(int batch, int plane, int n) { + return tensor[(batch * chn + plane) * sp + n]; + } + const T *tensor; + const int chn; + const int sp; +}; + +template +struct VarOp { + __device__ VarOp(T m, const T *t, int c, int s) + : mean(m), tensor(t), chn(c), sp(s) {} + __device__ __forceinline__ T operator()(int batch, int plane, int n) { + T val = tensor[(batch * chn + plane) * sp + n]; + return (val - mean) * (val - mean); + } + const T mean; + const T *tensor; + const int chn; + const int sp; +}; + +template +struct GradOp { + __device__ GradOp(T _weight, T _bias, const T *_z, const T *_dz, int c, int 
s) + : weight(_weight), bias(_bias), z(_z), dz(_dz), chn(c), sp(s) {} + __device__ __forceinline__ Pair operator()(int batch, int plane, int n) { + T _y = (z[(batch * chn + plane) * sp + n] - bias) / weight; + T _dz = dz[(batch * chn + plane) * sp + n]; + return Pair(_dz, _y * _dz); + } + const T weight; + const T bias; + const T *z; + const T *dz; + const int chn; + const int sp; +}; + +/*********** + * mean_var + ***********/ + +template +__global__ void mean_var_kernel(const T *x, T *mean, T *var, int num, int chn, int sp) { + int plane = blockIdx.x; + T norm = T(1) / T(num * sp); + + T _mean = reduce>(SumOp(x, chn, sp), plane, num, sp) * norm; + __syncthreads(); + T _var = reduce>(VarOp(_mean, x, chn, sp), plane, num, sp) * norm; + + if (threadIdx.x == 0) { + mean[plane] = _mean; + var[plane] = _var; + } +} + +std::vector mean_var_cuda(at::Tensor x) { + CHECK_CUDA_INPUT(x); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(x, num, chn, sp); + + // Prepare output tensors + auto mean = at::empty({chn}, x.options()); + auto var = at::empty({chn}, x.options()); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + AT_DISPATCH_FLOATING_TYPES(x.type(), "mean_var_cuda", ([&] { + mean_var_kernel<<>>( + x.data(), + mean.data(), + var.data(), + num, chn, sp); + })); + + return {mean, var}; +} + +/********** + * forward + **********/ + +template +__global__ void forward_kernel(T *x, const T *mean, const T *var, const T *weight, const T *bias, + bool affine, float eps, int num, int chn, int sp) { + int plane = blockIdx.x; + + T _mean = mean[plane]; + T _var = var[plane]; + T _weight = affine ? abs(weight[plane]) + eps : T(1); + T _bias = affine ? bias[plane] : T(0); + + T mul = rsqrt(_var + eps) * _weight; + + for (int batch = 0; batch < num; ++batch) { + for (int n = threadIdx.x; n < sp; n += blockDim.x) { + T _x = x[(batch * chn + plane) * sp + n]; + T _y = (_x - _mean) * mul + _bias; + + x[(batch * chn + plane) * sp + n] = _y; + } + } +} + +at::Tensor forward_cuda(at::Tensor x, at::Tensor mean, at::Tensor var, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + CHECK_CUDA_INPUT(x); + CHECK_CUDA_INPUT(mean); + CHECK_CUDA_INPUT(var); + CHECK_CUDA_INPUT(weight); + CHECK_CUDA_INPUT(bias); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(x, num, chn, sp); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + AT_DISPATCH_FLOATING_TYPES(x.type(), "forward_cuda", ([&] { + forward_kernel<<>>( + x.data(), + mean.data(), + var.data(), + weight.data(), + bias.data(), + affine, eps, num, chn, sp); + })); + + return x; +} + +/*********** + * edz_eydz + ***********/ + +template +__global__ void edz_eydz_kernel(const T *z, const T *dz, const T *weight, const T *bias, + T *edz, T *eydz, bool affine, float eps, int num, int chn, int sp) { + int plane = blockIdx.x; + + T _weight = affine ? abs(weight[plane]) + eps : 1.f; + T _bias = affine ? 
bias[plane] : 0.f; + + Pair res = reduce, GradOp>(GradOp(_weight, _bias, z, dz, chn, sp), plane, num, sp); + __syncthreads(); + + if (threadIdx.x == 0) { + edz[plane] = res.v1; + eydz[plane] = res.v2; + } +} + +std::vector edz_eydz_cuda(at::Tensor z, at::Tensor dz, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + CHECK_CUDA_INPUT(z); + CHECK_CUDA_INPUT(dz); + CHECK_CUDA_INPUT(weight); + CHECK_CUDA_INPUT(bias); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(z, num, chn, sp); + + auto edz = at::empty({chn}, z.options()); + auto eydz = at::empty({chn}, z.options()); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + AT_DISPATCH_FLOATING_TYPES(z.type(), "edz_eydz_cuda", ([&] { + edz_eydz_kernel<<>>( + z.data(), + dz.data(), + weight.data(), + bias.data(), + edz.data(), + eydz.data(), + affine, eps, num, chn, sp); + })); + + return {edz, eydz}; +} + +/*********** + * backward + ***********/ + +template +__global__ void backward_kernel(const T *z, const T *dz, const T *var, const T *weight, const T *bias, const T *edz, + const T *eydz, T *dx, bool affine, float eps, int num, int chn, int sp) { + int plane = blockIdx.x; + + T _weight = affine ? abs(weight[plane]) + eps : 1.f; + T _bias = affine ? bias[plane] : 0.f; + T _var = var[plane]; + T _edz = edz[plane]; + T _eydz = eydz[plane]; + + T _mul = _weight * rsqrt(_var + eps); + T count = T(num * sp); + + for (int batch = 0; batch < num; ++batch) { + for (int n = threadIdx.x; n < sp; n += blockDim.x) { + T _dz = dz[(batch * chn + plane) * sp + n]; + T _y = (z[(batch * chn + plane) * sp + n] - _bias) / _weight; + + dx[(batch * chn + plane) * sp + n] = (_dz - _edz / count - _y * _eydz / count) * _mul; + } + } +} + +at::Tensor backward_cuda(at::Tensor z, at::Tensor dz, at::Tensor var, at::Tensor weight, at::Tensor bias, + at::Tensor edz, at::Tensor eydz, bool affine, float eps) { + CHECK_CUDA_INPUT(z); + CHECK_CUDA_INPUT(dz); + CHECK_CUDA_INPUT(var); + CHECK_CUDA_INPUT(weight); + CHECK_CUDA_INPUT(bias); + CHECK_CUDA_INPUT(edz); + CHECK_CUDA_INPUT(eydz); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(z, num, chn, sp); + + auto dx = at::zeros_like(z); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + AT_DISPATCH_FLOATING_TYPES(z.type(), "backward_cuda", ([&] { + backward_kernel<<>>( + z.data(), + dz.data(), + var.data(), + weight.data(), + bias.data(), + edz.data(), + eydz.data(), + dx.data(), + affine, eps, num, chn, sp); + })); + + return dx; +} + +/************** + * activations + **************/ + +template +inline void leaky_relu_backward_impl(T *z, T *dz, float slope, int64_t count) { + // Create thrust pointers + thrust::device_ptr th_z = thrust::device_pointer_cast(z); + thrust::device_ptr th_dz = thrust::device_pointer_cast(dz); + + auto stream = at::cuda::getCurrentCUDAStream(); + thrust::transform_if(thrust::cuda::par.on(stream), + th_dz, th_dz + count, th_z, th_dz, + [slope] __device__ (const T& dz) { return dz * slope; }, + [] __device__ (const T& z) { return z < 0; }); + thrust::transform_if(thrust::cuda::par.on(stream), + th_z, th_z + count, th_z, + [slope] __device__ (const T& z) { return z / slope; }, + [] __device__ (const T& z) { return z < 0; }); +} + +void leaky_relu_backward_cuda(at::Tensor z, at::Tensor dz, float slope) { + CHECK_CUDA_INPUT(z); + CHECK_CUDA_INPUT(dz); + + int64_t count = z.numel(); + + AT_DISPATCH_FLOATING_TYPES(z.type(), 
"leaky_relu_backward_cuda", ([&] { + leaky_relu_backward_impl(z.data(), dz.data(), slope, count); + })); +} + +template +inline void elu_backward_impl(T *z, T *dz, int64_t count) { + // Create thrust pointers + thrust::device_ptr th_z = thrust::device_pointer_cast(z); + thrust::device_ptr th_dz = thrust::device_pointer_cast(dz); + + auto stream = at::cuda::getCurrentCUDAStream(); + thrust::transform_if(thrust::cuda::par.on(stream), + th_dz, th_dz + count, th_z, th_z, th_dz, + [] __device__ (const T& dz, const T& z) { return dz * (z + 1.); }, + [] __device__ (const T& z) { return z < 0; }); + thrust::transform_if(thrust::cuda::par.on(stream), + th_z, th_z + count, th_z, + [] __device__ (const T& z) { return log1p(z); }, + [] __device__ (const T& z) { return z < 0; }); +} + +void elu_backward_cuda(at::Tensor z, at::Tensor dz) { + CHECK_CUDA_INPUT(z); + CHECK_CUDA_INPUT(dz); + + int64_t count = z.numel(); + + AT_DISPATCH_FLOATING_TYPES(z.type(), "leaky_relu_backward_cuda", ([&] { + elu_backward_impl(z.data(), dz.data(), count); + })); +} diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cuda_half.cu b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cuda_half.cu new file mode 100644 index 0000000000000000000000000000000000000000..bb63e73f9d90179e5bd5dae5579c4844da9c25e2 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/inplace_abn_cuda_half.cu @@ -0,0 +1,275 @@ +#include + +#include + +#include + +#include "utils/checks.h" +#include "utils/cuda.cuh" +#include "inplace_abn.h" + +#include + +// Operations for reduce +struct SumOpH { + __device__ SumOpH(const half *t, int c, int s) + : tensor(t), chn(c), sp(s) {} + __device__ __forceinline__ float operator()(int batch, int plane, int n) { + return __half2float(tensor[(batch * chn + plane) * sp + n]); + } + const half *tensor; + const int chn; + const int sp; +}; + +struct VarOpH { + __device__ VarOpH(float m, const half *t, int c, int s) + : mean(m), tensor(t), chn(c), sp(s) {} + __device__ __forceinline__ float operator()(int batch, int plane, int n) { + const auto t = __half2float(tensor[(batch * chn + plane) * sp + n]); + return (t - mean) * (t - mean); + } + const float mean; + const half *tensor; + const int chn; + const int sp; +}; + +struct GradOpH { + __device__ GradOpH(float _weight, float _bias, const half *_z, const half *_dz, int c, int s) + : weight(_weight), bias(_bias), z(_z), dz(_dz), chn(c), sp(s) {} + __device__ __forceinline__ Pair operator()(int batch, int plane, int n) { + float _y = (__half2float(z[(batch * chn + plane) * sp + n]) - bias) / weight; + float _dz = __half2float(dz[(batch * chn + plane) * sp + n]); + return Pair(_dz, _y * _dz); + } + const float weight; + const float bias; + const half *z; + const half *dz; + const int chn; + const int sp; +}; + +/*********** + * mean_var + ***********/ + +__global__ void mean_var_kernel_h(const half *x, float *mean, float *var, int num, int chn, int sp) { + int plane = blockIdx.x; + float norm = 1.f / static_cast(num * sp); + + float _mean = reduce(SumOpH(x, chn, sp), plane, num, sp) * norm; + __syncthreads(); + float _var = reduce(VarOpH(_mean, x, chn, sp), plane, num, sp) * norm; + + if (threadIdx.x == 0) { + mean[plane] = _mean; + var[plane] = _var; + } +} + +std::vector mean_var_cuda_h(at::Tensor x) { + CHECK_CUDA_INPUT(x); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(x, num, chn, sp); + + // Prepare output tensors + auto mean = at::empty({chn},x.options().dtype(at::kFloat)); + auto var = 
at::empty({chn},x.options().dtype(at::kFloat)); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + mean_var_kernel_h<<>>( + reinterpret_cast(x.data()), + mean.data(), + var.data(), + num, chn, sp); + + return {mean, var}; +} + +/********** + * forward + **********/ + +__global__ void forward_kernel_h(half *x, const float *mean, const float *var, const float *weight, const float *bias, + bool affine, float eps, int num, int chn, int sp) { + int plane = blockIdx.x; + + const float _mean = mean[plane]; + const float _var = var[plane]; + const float _weight = affine ? abs(weight[plane]) + eps : 1.f; + const float _bias = affine ? bias[plane] : 0.f; + + const float mul = rsqrt(_var + eps) * _weight; + + for (int batch = 0; batch < num; ++batch) { + for (int n = threadIdx.x; n < sp; n += blockDim.x) { + half *x_ptr = x + (batch * chn + plane) * sp + n; + float _x = __half2float(*x_ptr); + float _y = (_x - _mean) * mul + _bias; + + *x_ptr = __float2half(_y); + } + } +} + +at::Tensor forward_cuda_h(at::Tensor x, at::Tensor mean, at::Tensor var, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + CHECK_CUDA_INPUT(x); + CHECK_CUDA_INPUT(mean); + CHECK_CUDA_INPUT(var); + CHECK_CUDA_INPUT(weight); + CHECK_CUDA_INPUT(bias); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(x, num, chn, sp); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + forward_kernel_h<<>>( + reinterpret_cast(x.data()), + mean.data(), + var.data(), + weight.data(), + bias.data(), + affine, eps, num, chn, sp); + + return x; +} + +__global__ void edz_eydz_kernel_h(const half *z, const half *dz, const float *weight, const float *bias, + float *edz, float *eydz, bool affine, float eps, int num, int chn, int sp) { + int plane = blockIdx.x; + + float _weight = affine ? abs(weight[plane]) + eps : 1.f; + float _bias = affine ? bias[plane] : 0.f; + + Pair res = reduce, GradOpH>(GradOpH(_weight, _bias, z, dz, chn, sp), plane, num, sp); + __syncthreads(); + + if (threadIdx.x == 0) { + edz[plane] = res.v1; + eydz[plane] = res.v2; + } +} + +std::vector edz_eydz_cuda_h(at::Tensor z, at::Tensor dz, at::Tensor weight, at::Tensor bias, + bool affine, float eps) { + CHECK_CUDA_INPUT(z); + CHECK_CUDA_INPUT(dz); + CHECK_CUDA_INPUT(weight); + CHECK_CUDA_INPUT(bias); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(z, num, chn, sp); + + auto edz = at::empty({chn},z.options().dtype(at::kFloat)); + auto eydz = at::empty({chn},z.options().dtype(at::kFloat)); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + edz_eydz_kernel_h<<>>( + reinterpret_cast(z.data()), + reinterpret_cast(dz.data()), + weight.data(), + bias.data(), + edz.data(), + eydz.data(), + affine, eps, num, chn, sp); + + return {edz, eydz}; +} + +__global__ void backward_kernel_h(const half *z, const half *dz, const float *var, const float *weight, const float *bias, const float *edz, + const float *eydz, half *dx, bool affine, float eps, int num, int chn, int sp) { + int plane = blockIdx.x; + + float _weight = affine ? abs(weight[plane]) + eps : 1.f; + float _bias = affine ? 
bias[plane] : 0.f; + float _var = var[plane]; + float _edz = edz[plane]; + float _eydz = eydz[plane]; + + float _mul = _weight * rsqrt(_var + eps); + float count = float(num * sp); + + for (int batch = 0; batch < num; ++batch) { + for (int n = threadIdx.x; n < sp; n += blockDim.x) { + float _dz = __half2float(dz[(batch * chn + plane) * sp + n]); + float _y = (__half2float(z[(batch * chn + plane) * sp + n]) - _bias) / _weight; + + dx[(batch * chn + plane) * sp + n] = __float2half((_dz - _edz / count - _y * _eydz / count) * _mul); + } + } +} + +at::Tensor backward_cuda_h(at::Tensor z, at::Tensor dz, at::Tensor var, at::Tensor weight, at::Tensor bias, + at::Tensor edz, at::Tensor eydz, bool affine, float eps) { + CHECK_CUDA_INPUT(z); + CHECK_CUDA_INPUT(dz); + CHECK_CUDA_INPUT(var); + CHECK_CUDA_INPUT(weight); + CHECK_CUDA_INPUT(bias); + CHECK_CUDA_INPUT(edz); + CHECK_CUDA_INPUT(eydz); + + // Extract dimensions + int64_t num, chn, sp; + get_dims(z, num, chn, sp); + + auto dx = at::zeros_like(z); + + // Run kernel + dim3 blocks(chn); + dim3 threads(getNumThreads(sp)); + auto stream = at::cuda::getCurrentCUDAStream(); + backward_kernel_h<<>>( + reinterpret_cast(z.data()), + reinterpret_cast(dz.data()), + var.data(), + weight.data(), + bias.data(), + edz.data(), + eydz.data(), + reinterpret_cast(dx.data()), + affine, eps, num, chn, sp); + + return dx; +} + +__global__ void leaky_relu_backward_impl_h(half *z, half *dz, float slope, int64_t count) { + for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < count; i += blockDim.x * gridDim.x){ + float _z = __half2float(z[i]); + if (_z < 0) { + dz[i] = __float2half(__half2float(dz[i]) * slope); + z[i] = __float2half(_z / slope); + } + } +} + +void leaky_relu_backward_cuda_h(at::Tensor z, at::Tensor dz, float slope) { + CHECK_CUDA_INPUT(z); + CHECK_CUDA_INPUT(dz); + + int64_t count = z.numel(); + dim3 threads(getNumThreads(count)); + dim3 blocks = (count + threads.x - 1) / threads.x; + auto stream = at::cuda::getCurrentCUDAStream(); + leaky_relu_backward_impl_h<<>>( + reinterpret_cast(z.data()), + reinterpret_cast(dz.data()), + slope, count); +} + diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/checks.h b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/checks.h new file mode 100644 index 0000000000000000000000000000000000000000..e761a6fe34d0789815b588eba7e3726026e0e868 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/checks.h @@ -0,0 +1,15 @@ +#pragma once + +#include + +// Define AT_CHECK for old version of ATen where the same function was called AT_ASSERT +#ifndef AT_CHECK +#define AT_CHECK AT_ASSERT +#endif + +#define CHECK_CUDA(x) AT_CHECK((x).type().is_cuda(), #x " must be a CUDA tensor") +#define CHECK_CPU(x) AT_CHECK(!(x).type().is_cuda(), #x " must be a CPU tensor") +#define CHECK_CONTIGUOUS(x) AT_CHECK((x).is_contiguous(), #x " must be contiguous") + +#define CHECK_CUDA_INPUT(x) CHECK_CUDA(x); CHECK_CONTIGUOUS(x) +#define CHECK_CPU_INPUT(x) CHECK_CPU(x); CHECK_CONTIGUOUS(x) \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/common.h b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/common.h new file mode 100644 index 0000000000000000000000000000000000000000..e8403eef8a233b75dd4bb353c16486fe1be2039a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/common.h @@ -0,0 +1,49 @@ +#pragma once + +#include + +/* + * Functions to share code between CPU and GPU + */ + +#ifdef __CUDACC__ +// CUDA versions + 
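The leaky-ReLU backward kernels above rely on the activation being invertible: since the forward pass kept only the activated output, the backward pass recovers the pre-activation value for negative entries by dividing by the slope while rescaling the incoming gradient. A NumPy restatement of that element-wise rule (illustrative only, operating in place like the kernels) is:

    import numpy as np

    def leaky_relu_backward(z, dz, slope):
        # z holds the *activated* output; for negative entries, undo the
        # activation and scale the gradient by the slope.
        neg = z < 0
        z[neg] = z[neg] / slope      # recover the pre-activation input
        dz[neg] = dz[neg] * slope    # d(slope * x)/dx = slope
        return z, dz
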
+#define HOST_DEVICE __host__ __device__ +#define INLINE_HOST_DEVICE __host__ __device__ inline +#define FLOOR(x) floor(x) + +#if __CUDA_ARCH__ >= 600 +// Recent compute capabilities have block-level atomicAdd for all data types, so we use that +#define ACCUM(x,y) atomicAdd_block(&(x),(y)) +#else +// Older architectures don't have block-level atomicAdd, nor atomicAdd for doubles, so we defer to atomicAdd for float +// and use the known atomicCAS-based implementation for double +template +__device__ inline data_t atomic_add(data_t *address, data_t val) { + return atomicAdd(address, val); +} + +template<> +__device__ inline double atomic_add(double *address, double val) { + unsigned long long int* address_as_ull = (unsigned long long int*)address; + unsigned long long int old = *address_as_ull, assumed; + do { + assumed = old; + old = atomicCAS(address_as_ull, assumed, __double_as_longlong(val + __longlong_as_double(assumed))); + } while (assumed != old); + return __longlong_as_double(old); +} + +#define ACCUM(x,y) atomic_add(&(x),(y)) +#endif // #if __CUDA_ARCH__ >= 600 + +#else +// CPU versions + +#define HOST_DEVICE +#define INLINE_HOST_DEVICE inline +#define FLOOR(x) std::floor(x) +#define ACCUM(x,y) (x) += (y) + +#endif // #ifdef __CUDACC__ \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/cuda.cuh b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/cuda.cuh new file mode 100644 index 0000000000000000000000000000000000000000..60c0023835e02c5f7c539c28ac07b75b72df394b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/modules/src/utils/cuda.cuh @@ -0,0 +1,71 @@ +#pragma once + +/* + * General settings and functions + */ +const int WARP_SIZE = 32; +const int MAX_BLOCK_SIZE = 1024; + +static int getNumThreads(int nElem) { + int threadSizes[6] = {32, 64, 128, 256, 512, MAX_BLOCK_SIZE}; + for (int i = 0; i < 6; ++i) { + if (nElem <= threadSizes[i]) { + return threadSizes[i]; + } + } + return MAX_BLOCK_SIZE; +} + +/* + * Reduction utilities + */ +template +__device__ __forceinline__ T WARP_SHFL_XOR(T value, int laneMask, int width = warpSize, + unsigned int mask = 0xffffffff) { +#if CUDART_VERSION >= 9000 + return __shfl_xor_sync(mask, value, laneMask, width); +#else + return __shfl_xor(value, laneMask, width); +#endif +} + +__device__ __forceinline__ int getMSB(int val) { return 31 - __clz(val); } + +template +struct Pair { + T v1, v2; + __device__ Pair() {} + __device__ Pair(T _v1, T _v2) : v1(_v1), v2(_v2) {} + __device__ Pair(T v) : v1(v), v2(v) {} + __device__ Pair(int v) : v1(v), v2(v) {} + __device__ Pair &operator+=(const Pair &a) { + v1 += a.v1; + v2 += a.v2; + return *this; + } +}; + +template +static __device__ __forceinline__ T warpSum(T val) { +#if __CUDA_ARCH__ >= 300 + for (int i = 0; i < getMSB(WARP_SIZE); ++i) { + val += WARP_SHFL_XOR(val, 1 << i, WARP_SIZE); + } +#else + __shared__ T values[MAX_BLOCK_SIZE]; + values[threadIdx.x] = val; + __threadfence_block(); + const int base = (threadIdx.x / WARP_SIZE) * WARP_SIZE; + for (int i = 1; i < WARP_SIZE; i++) { + val += values[base + ((i + threadIdx.x) % WARP_SIZE)]; + } +#endif + return val; +} + +template +static __device__ __forceinline__ Pair warpSum(Pair value) { + value.v1 = warpSum(value.v1); + value.v2 = warpSum(value.v2); + return value; +} \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/AugmentCE2P.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/AugmentCE2P.py new file mode 100644 index 
0000000000000000000000000000000000000000..b5d2c7f88e51dbde32c551ba933647a137395147 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/AugmentCE2P.py @@ -0,0 +1,337 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : AugmentCE2P.py +@Time : 8/4/19 3:35 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import functools + +import torch +import torch.nn as nn +from torch.nn import functional as F +# Note here we adopt the InplaceABNSync implementation from https://github.com/mapillary/inplace_abn +# By default, the InplaceABNSync module contains a BatchNorm Layer and a LeakyReLu layer +from modules import InPlaceABNSync + +BatchNorm2d = functools.partial(InPlaceABNSync, activation='none') + +affine_par = True + +pretrained_settings = { + 'resnet101': { + 'imagenet': { + 'input_space': 'BGR', + 'input_size': [3, 224, 224], + 'input_range': [0, 1], + 'mean': [0.406, 0.456, 0.485], + 'std': [0.225, 0.224, 0.229], + 'num_classes': 1000 + } + }, +} + + +def conv3x3(in_planes, out_planes, stride=1): + "3x3 convolution with padding" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=1, bias=False) + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, fist_dilation=1, multi_grid=1): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, + padding=dilation * multi_grid, dilation=dilation * multi_grid, bias=False) + self.bn2 = BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) + self.bn3 = BatchNorm2d(planes * 4) + self.relu = nn.ReLU(inplace=False) + self.relu_inplace = nn.ReLU(inplace=True) + self.downsample = downsample + self.dilation = dilation + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out = out + residual + out = self.relu_inplace(out) + + return out + + +class PSPModule(nn.Module): + """ + Reference: + Zhao, Hengshuang, et al. 
*"Pyramid scene parsing network."* + """ + + def __init__(self, features, out_features=512, sizes=(1, 2, 3, 6)): + super(PSPModule, self).__init__() + + self.stages = [] + self.stages = nn.ModuleList([self._make_stage(features, out_features, size) for size in sizes]) + self.bottleneck = nn.Sequential( + nn.Conv2d(features + len(sizes) * out_features, out_features, kernel_size=3, padding=1, dilation=1, + bias=False), + InPlaceABNSync(out_features), + ) + + def _make_stage(self, features, out_features, size): + prior = nn.AdaptiveAvgPool2d(output_size=(size, size)) + conv = nn.Conv2d(features, out_features, kernel_size=1, bias=False) + bn = InPlaceABNSync(out_features) + return nn.Sequential(prior, conv, bn) + + def forward(self, feats): + h, w = feats.size(2), feats.size(3) + priors = [F.interpolate(input=stage(feats), size=(h, w), mode='bilinear', align_corners=True) for stage in + self.stages] + [feats] + bottle = self.bottleneck(torch.cat(priors, 1)) + return bottle + + +class ASPPModule(nn.Module): + """ + Reference: + Chen, Liang-Chieh, et al. *"Rethinking Atrous Convolution for Semantic Image Segmentation."* + """ + + def __init__(self, features, inner_features=256, out_features=512, dilations=(12, 24, 36)): + super(ASPPModule, self).__init__() + + self.conv1 = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)), + nn.Conv2d(features, inner_features, kernel_size=1, padding=0, dilation=1, + bias=False), + InPlaceABNSync(inner_features)) + self.conv2 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(inner_features)) + self.conv3 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=3, padding=dilations[0], dilation=dilations[0], bias=False), + InPlaceABNSync(inner_features)) + self.conv4 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=3, padding=dilations[1], dilation=dilations[1], bias=False), + InPlaceABNSync(inner_features)) + self.conv5 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=3, padding=dilations[2], dilation=dilations[2], bias=False), + InPlaceABNSync(inner_features)) + + self.bottleneck = nn.Sequential( + nn.Conv2d(inner_features * 5, out_features, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(out_features), + nn.Dropout2d(0.1) + ) + + def forward(self, x): + _, _, h, w = x.size() + + feat1 = F.interpolate(self.conv1(x), size=(h, w), mode='bilinear', align_corners=True) + + feat2 = self.conv2(x) + feat3 = self.conv3(x) + feat4 = self.conv4(x) + feat5 = self.conv5(x) + out = torch.cat((feat1, feat2, feat3, feat4, feat5), 1) + + bottle = self.bottleneck(out) + return bottle + + +class Edge_Module(nn.Module): + """ + Edge Learning Branch + """ + + def __init__(self, in_fea=[256, 512, 1024], mid_fea=256, out_fea=2): + super(Edge_Module, self).__init__() + + self.conv1 = nn.Sequential( + nn.Conv2d(in_fea[0], mid_fea, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(mid_fea) + ) + self.conv2 = nn.Sequential( + nn.Conv2d(in_fea[1], mid_fea, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(mid_fea) + ) + self.conv3 = nn.Sequential( + nn.Conv2d(in_fea[2], mid_fea, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(mid_fea) + ) + self.conv4 = nn.Conv2d(mid_fea, out_fea, kernel_size=3, padding=1, dilation=1, bias=True) + self.conv5 = nn.Conv2d(out_fea * 3, out_fea, kernel_size=1, padding=0, dilation=1, bias=True) + + def forward(self, x1, x2, x3): + _, _, h, w = x1.size() + + edge1_fea = 
self.conv1(x1) + edge1 = self.conv4(edge1_fea) + edge2_fea = self.conv2(x2) + edge2 = self.conv4(edge2_fea) + edge3_fea = self.conv3(x3) + edge3 = self.conv4(edge3_fea) + + edge2_fea = F.interpolate(edge2_fea, size=(h, w), mode='bilinear', align_corners=True) + edge3_fea = F.interpolate(edge3_fea, size=(h, w), mode='bilinear', align_corners=True) + edge2 = F.interpolate(edge2, size=(h, w), mode='bilinear', align_corners=True) + edge3 = F.interpolate(edge3, size=(h, w), mode='bilinear', align_corners=True) + + edge = torch.cat([edge1, edge2, edge3], dim=1) + edge_fea = torch.cat([edge1_fea, edge2_fea, edge3_fea], dim=1) + edge = self.conv5(edge) + + return edge, edge_fea + + +class Decoder_Module(nn.Module): + """ + Parsing Branch Decoder Module. + """ + + def __init__(self, num_classes): + super(Decoder_Module, self).__init__() + self.conv1 = nn.Sequential( + nn.Conv2d(512, 256, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(256) + ) + self.conv2 = nn.Sequential( + nn.Conv2d(256, 48, kernel_size=1, stride=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(48) + ) + self.conv3 = nn.Sequential( + nn.Conv2d(304, 256, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(256), + nn.Conv2d(256, 256, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(256) + ) + + self.conv4 = nn.Conv2d(256, num_classes, kernel_size=1, padding=0, dilation=1, bias=True) + + def forward(self, xt, xl): + _, _, h, w = xl.size() + xt = F.interpolate(self.conv1(xt), size=(h, w), mode='bilinear', align_corners=True) + xl = self.conv2(xl) + x = torch.cat([xt, xl], dim=1) + x = self.conv3(x) + seg = self.conv4(x) + return seg, x + + +class ResNet(nn.Module): + def __init__(self, block, layers, num_classes): + self.inplanes = 128 + super(ResNet, self).__init__() + self.conv1 = conv3x3(3, 64, stride=2) + self.bn1 = BatchNorm2d(64) + self.relu1 = nn.ReLU(inplace=False) + self.conv2 = conv3x3(64, 64) + self.bn2 = BatchNorm2d(64) + self.relu2 = nn.ReLU(inplace=False) + self.conv3 = conv3x3(64, 128) + self.bn3 = BatchNorm2d(128) + self.relu3 = nn.ReLU(inplace=False) + + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=2, multi_grid=(1, 1, 1)) + + self.context_encoding = PSPModule(2048, 512) + + self.edge = Edge_Module() + self.decoder = Decoder_Module(num_classes) + + self.fushion = nn.Sequential( + nn.Conv2d(1024, 256, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(256), + nn.Dropout2d(0.1), + nn.Conv2d(256, num_classes, kernel_size=1, padding=0, dilation=1, bias=True) + ) + + def _make_layer(self, block, planes, blocks, stride=1, dilation=1, multi_grid=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + BatchNorm2d(planes * block.expansion, affine=affine_par)) + + layers = [] + generate_multi_grid = lambda index, grids: grids[index % len(grids)] if isinstance(grids, tuple) else 1 + layers.append(block(self.inplanes, planes, stride, dilation=dilation, downsample=downsample, + multi_grid=generate_multi_grid(0, multi_grid))) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append( + 
block(self.inplanes, planes, dilation=dilation, multi_grid=generate_multi_grid(i, multi_grid))) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.relu1(self.bn1(self.conv1(x))) + x = self.relu2(self.bn2(self.conv2(x))) + x = self.relu3(self.bn3(self.conv3(x))) + x = self.maxpool(x) + x2 = self.layer1(x) + x3 = self.layer2(x2) + x4 = self.layer3(x3) + x5 = self.layer4(x4) + x = self.context_encoding(x5) + parsing_result, parsing_fea = self.decoder(x, x2) + # Edge Branch + edge_result, edge_fea = self.edge(x2, x3, x4) + # Fusion Branch + x = torch.cat([parsing_fea, edge_fea], dim=1) + fusion_result = self.fushion(x) + return [[parsing_result, fusion_result], [edge_result]] + + +def initialize_pretrained_model(model, settings, pretrained='./models/resnet101-imagenet.pth'): + model.input_space = settings['input_space'] + model.input_size = settings['input_size'] + model.input_range = settings['input_range'] + model.mean = settings['mean'] + model.std = settings['std'] + + if pretrained is not None: + saved_state_dict = torch.load(pretrained) + new_params = model.state_dict().copy() + for i in saved_state_dict: + i_parts = i.split('.') + if not i_parts[0] == 'fc': + new_params['.'.join(i_parts[0:])] = saved_state_dict[i] + model.load_state_dict(new_params) + + +def resnet101(num_classes=20, pretrained='./models/resnet101-imagenet.pth'): + model = ResNet(Bottleneck, [3, 4, 23, 3], num_classes) + settings = pretrained_settings['resnet101']['imagenet'] + initialize_pretrained_model(model, settings, pretrained) + return model diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0fce5b997eb2567e2dfc894d4e75ea4a6e3f0e72 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/__init__.py @@ -0,0 +1,13 @@ +from __future__ import absolute_import + +from networks.AugmentCE2P import resnet101 + +__factory = { + 'resnet101': resnet101, +} + + +def init_model(name, *args, **kwargs): + if name not in __factory.keys(): + raise KeyError("Unknown model arch: {}".format(name)) + return __factory[name](*args, **kwargs) \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/mobilenetv2.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/mobilenetv2.py new file mode 100644 index 0000000000000000000000000000000000000000..6f2fe342877cfbc5796efea85af9abccfb80a27e --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/mobilenetv2.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : mobilenetv2.py +@Time : 8/4/19 3:35 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
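For context, the factory in networks/__init__.py above is how the rest of the repository is expected to construct the parsing network. A minimal sketch of the call (passing pretrained=None to skip loading the ImageNet checkpoint; num_classes=20 is simply the factory's default):

    from networks import init_model

    # Assumes the inplace_abn extension builds; pretrained=None avoids
    # needing ./models/resnet101-imagenet.pth on disk.
    model = init_model('resnet101', num_classes=20, pretrained=None)
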
+""" + +import torch.nn as nn +import math +import functools + +from modules import InPlaceABN, InPlaceABNSync + +BatchNorm2d = functools.partial(InPlaceABNSync, activation='none') + +__all__ = ['mobilenetv2'] + + +def conv_bn(inp, oup, stride): + return nn.Sequential( + nn.Conv2d(inp, oup, 3, stride, 1, bias=False), + BatchNorm2d(oup), + nn.ReLU6(inplace=True) + ) + + +def conv_1x1_bn(inp, oup): + return nn.Sequential( + nn.Conv2d(inp, oup, 1, 1, 0, bias=False), + BatchNorm2d(oup), + nn.ReLU6(inplace=True) + ) + + +class InvertedResidual(nn.Module): + def __init__(self, inp, oup, stride, expand_ratio): + super(InvertedResidual, self).__init__() + self.stride = stride + assert stride in [1, 2] + + hidden_dim = round(inp * expand_ratio) + self.use_res_connect = self.stride == 1 and inp == oup + + if expand_ratio == 1: + self.conv = nn.Sequential( + # dw + nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False), + BatchNorm2d(hidden_dim), + nn.ReLU6(inplace=True), + # pw-linear + nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), + BatchNorm2d(oup), + ) + else: + self.conv = nn.Sequential( + # pw + nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False), + BatchNorm2d(hidden_dim), + nn.ReLU6(inplace=True), + # dw + nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False), + BatchNorm2d(hidden_dim), + nn.ReLU6(inplace=True), + # pw-linear + nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), + BatchNorm2d(oup), + ) + + def forward(self, x): + if self.use_res_connect: + return x + self.conv(x) + else: + return self.conv(x) + + +class MobileNetV2(nn.Module): + def __init__(self, n_class=1000, input_size=224, width_mult=1.): + super(MobileNetV2, self).__init__() + block = InvertedResidual + input_channel = 32 + last_channel = 1280 + interverted_residual_setting = [ + # t, c, n, s + [1, 16, 1, 1], + [6, 24, 2, 2], # layer 2 + [6, 32, 3, 2], # layer 3 + [6, 64, 4, 2], + [6, 96, 3, 1], # layer 4 + [6, 160, 3, 2], + [6, 320, 1, 1], # layer 5 + ] + + # building first layer + assert input_size % 32 == 0 + input_channel = int(input_channel * width_mult) + self.last_channel = int(last_channel * width_mult) if width_mult > 1.0 else last_channel + self.features = [conv_bn(3, input_channel, 2)] + # building inverted residual blocks + for t, c, n, s in interverted_residual_setting: + output_channel = int(c * width_mult) + for i in range(n): + if i == 0: + self.features.append(block(input_channel, output_channel, s, expand_ratio=t)) + else: + self.features.append(block(input_channel, output_channel, 1, expand_ratio=t)) + input_channel = output_channel + # building last several layers + self.features.append(conv_1x1_bn(input_channel, self.last_channel)) + # make it nn.Sequential + self.features = nn.Sequential(*self.features) + + # building classifier + self.classifier = nn.Sequential( + nn.Dropout(0.2), + nn.Linear(self.last_channel, n_class), + ) + + self._initialize_weights() + + def forward(self, x): + x = self.features(x) + x = x.mean(3).mean(2) + x = self.classifier(x) + return x + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2. 
/ n)) + if m.bias is not None: + m.bias.data.zero_() + elif isinstance(m, BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + elif isinstance(m, nn.Linear): + n = m.weight.size(1) + m.weight.data.normal_(0, 0.01) + m.bias.data.zero_() + + +def mobilenetv2(pretrained=False, **kwargs): + """Constructs a MobileNet_V2 model. + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + model = MobileNetV2(n_class=1000, **kwargs) + if pretrained: + model.load_state_dict(load_url(model_urls['mobilenetv2']), strict=False) + return model diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/resnet.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..88d6f73bc4fc327e18123020e01ccf5c1b37f025 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/resnet.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : resnet.py +@Time : 8/4/19 3:35 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import functools +import torch.nn as nn +import math +from torch.utils.model_zoo import load_url + +from modules import InPlaceABNSync + +BatchNorm2d = functools.partial(InPlaceABNSync, activation='none') + +__all__ = ['ResNet', 'resnet18', 'resnet50', 'resnet101'] # resnet101 is coming soon! + +model_urls = { + 'resnet18': 'http://sceneparsing.csail.mit.edu/model/pretrained_resnet/resnet18-imagenet.pth', + 'resnet50': 'http://sceneparsing.csail.mit.edu/model/pretrained_resnet/resnet50-imagenet.pth', + 'resnet101': 'http://sceneparsing.csail.mit.edu/model/pretrained_resnet/resnet101-imagenet.pth' +} + + +def conv3x3(in_planes, out_planes, stride=1): + "3x3 convolution with padding" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=1, bias=False) + + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, + padding=1, bias=False) + self.bn2 = BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) + self.bn3 = BatchNorm2d(planes * 4) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not 
None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + + def __init__(self, block, layers, num_classes=1000): + self.inplanes = 128 + super(ResNet, self).__init__() + self.conv1 = conv3x3(3, 64, stride=2) + self.bn1 = BatchNorm2d(64) + self.relu1 = nn.ReLU(inplace=True) + self.conv2 = conv3x3(64, 64) + self.bn2 = BatchNorm2d(64) + self.relu2 = nn.ReLU(inplace=True) + self.conv3 = conv3x3(64, 128) + self.bn3 = BatchNorm2d(128) + self.relu3 = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.avgpool = nn.AvgPool2d(7, stride=1) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2. / n)) + elif isinstance(m, BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.relu1(self.bn1(self.conv1(x))) + x = self.relu2(self.bn2(self.conv2(x))) + x = self.relu3(self.bn3(self.conv3(x))) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + + return x + + +def resnet18(pretrained=False, **kwargs): + """Constructs a ResNet-18 model. + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs) + if pretrained: + model.load_state_dict(load_url(model_urls['resnet18'])) + return model + + +def resnet50(pretrained=False, **kwargs): + """Constructs a ResNet-50 model. + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs) + if pretrained: + model.load_state_dict(load_url(model_urls['resnet50']), strict=False) + return model + + +def resnet101(pretrained=False, **kwargs): + """Constructs a ResNet-101 model. 
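A quick smoke test of the backbone factories defined above, assuming the inplace_abn extension compiles; the 224x224 input matches the fixed 7x7 average pool at the end of the network, so the classifier output has shape [1, 1000].

    import torch

    model = resnet50(pretrained=False, num_classes=1000)
    model.eval()
    with torch.no_grad():
        logits = model(torch.randn(1, 3, 224, 224))
    print(logits.shape)  # expected: torch.Size([1, 1000])
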
+ Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs) + if pretrained: + model.load_state_dict(load_url(model_urls['resnet101']), strict=False) + return model diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/resnext.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/resnext.py new file mode 100644 index 0000000000000000000000000000000000000000..96adb54146addc523be71591eb93afcc2c25307f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/backbone/resnext.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : resnext.py.py +@Time : 8/11/19 8:58 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" +import functools +import torch.nn as nn +import math +from torch.utils.model_zoo import load_url + +from modules import InPlaceABNSync + +BatchNorm2d = functools.partial(InPlaceABNSync, activation='none') + +__all__ = ['ResNeXt', 'resnext101'] # support resnext 101 + +model_urls = { + 'resnext50': 'http://sceneparsing.csail.mit.edu/model/pretrained_resnet/resnext50-imagenet.pth', + 'resnext101': 'http://sceneparsing.csail.mit.edu/model/pretrained_resnet/resnext101-imagenet.pth' +} + + +def conv3x3(in_planes, out_planes, stride=1): + "3x3 convolution with padding" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=1, bias=False) + + +class GroupBottleneck(nn.Module): + expansion = 2 + + def __init__(self, inplanes, planes, stride=1, groups=1, downsample=None): + super(GroupBottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, + padding=1, groups=groups, bias=False) + self.bn2 = BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * 2, kernel_size=1, bias=False) + self.bn3 = BatchNorm2d(planes * 2) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class ResNeXt(nn.Module): + + def __init__(self, block, layers, groups=32, num_classes=1000): + self.inplanes = 128 + super(ResNeXt, self).__init__() + self.conv1 = conv3x3(3, 64, stride=2) + self.bn1 = BatchNorm2d(64) + self.relu1 = nn.ReLU(inplace=True) + self.conv2 = conv3x3(64, 64) + self.bn2 = BatchNorm2d(64) + self.relu2 = nn.ReLU(inplace=True) + self.conv3 = conv3x3(64, 128) + self.bn3 = BatchNorm2d(128) + self.relu3 = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + self.layer1 = self._make_layer(block, 128, layers[0], groups=groups) + self.layer2 = self._make_layer(block, 256, layers[1], stride=2, groups=groups) + self.layer3 = self._make_layer(block, 512, layers[2], stride=2, groups=groups) + self.layer4 = self._make_layer(block, 1024, layers[3], stride=2, groups=groups) + self.avgpool = nn.AvgPool2d(7, stride=1) + self.fc = nn.Linear(1024 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, 
nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels // m.groups + m.weight.data.normal_(0, math.sqrt(2. / n)) + elif isinstance(m, BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + def _make_layer(self, block, planes, blocks, stride=1, groups=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, groups, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes, groups=groups)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.relu1(self.bn1(self.conv1(x))) + x = self.relu2(self.bn2(self.conv2(x))) + x = self.relu3(self.bn3(self.conv3(x))) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + + return x + + +def resnext101(pretrained=False, **kwargs): + """Constructs a ResNet-101 model. + Args: + pretrained (bool): If True, returns a model pre-trained on Places + """ + model = ResNeXt(GroupBottleneck, [3, 4, 23, 3], **kwargs) + if pretrained: + model.load_state_dict(load_url(model_urls['resnext101']), strict=False) + return model diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/aspp.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/aspp.py new file mode 100644 index 0000000000000000000000000000000000000000..d0ba531a8920665c982b1f3412bc030465d56d2a --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/aspp.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : aspp.py +@Time : 8/4/19 3:36 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import torch +import torch.nn as nn +from torch.nn import functional as F + +from modules import InPlaceABNSync + + +class ASPPModule(nn.Module): + """ + Reference: + Chen, Liang-Chieh, et al. 
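The backbone factories above (resnet18/resnet50/resnet101, plus resnext101 in the following file) mirror the torchvision constructors but swap every BatchNorm for `InPlaceABNSync` from the bundled `modules` extension, so they only build and run on a CUDA machine where that extension is compiled. A minimal instantiation sketch; the import path and the presence of the extension are assumptions, not something this diff guarantees:

```python
import torch
from networks.backbone.resnet import resnet101  # assumed import path; requires the inplace_abn extension

# Build the classification variant used for ImageNet pretraining (no parsing head attached).
backbone = resnet101(pretrained=False).cuda().eval()
with torch.no_grad():
    out = backbone(torch.randn(2, 3, 224, 224, device='cuda'))
print(out.shape)  # torch.Size([2, 1000])
```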
*"Rethinking Atrous Convolution for Semantic Image Segmentation."* + """ + def __init__(self, features, out_features=512, inner_features=256, dilations=(12, 24, 36)): + super(ASPPModule, self).__init__() + + self.conv1 = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)), + nn.Conv2d(features, inner_features, kernel_size=1, padding=0, dilation=1, + bias=False), + InPlaceABNSync(inner_features)) + self.conv2 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(inner_features)) + self.conv3 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=3, padding=dilations[0], dilation=dilations[0], bias=False), + InPlaceABNSync(inner_features)) + self.conv4 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=3, padding=dilations[1], dilation=dilations[1], bias=False), + InPlaceABNSync(inner_features)) + self.conv5 = nn.Sequential( + nn.Conv2d(features, inner_features, kernel_size=3, padding=dilations[2], dilation=dilations[2], bias=False), + InPlaceABNSync(inner_features)) + + self.bottleneck = nn.Sequential( + nn.Conv2d(inner_features * 5, out_features, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(out_features), + nn.Dropout2d(0.1) + ) + + def forward(self, x): + _, _, h, w = x.size() + + feat1 = F.interpolate(self.conv1(x), size=(h, w), mode='bilinear', align_corners=True) + + feat2 = self.conv2(x) + feat3 = self.conv3(x) + feat4 = self.conv4(x) + feat5 = self.conv5(x) + out = torch.cat((feat1, feat2, feat3, feat4, feat5), 1) + + bottle = self.bottleneck(out) + return bottle \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/ocnet.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/ocnet.py new file mode 100644 index 0000000000000000000000000000000000000000..ac43ebf489ee478c48acf3f93b01b32bdb08cdf3 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/ocnet.py @@ -0,0 +1,226 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : ocnet.py +@Time : 8/4/19 3:36 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
+""" + +import functools + +import torch +import torch.nn as nn +from torch.autograd import Variable +from torch.nn import functional as F + +from modules import InPlaceABNSync +BatchNorm2d = functools.partial(InPlaceABNSync, activation='none') + + +class _SelfAttentionBlock(nn.Module): + ''' + The basic implementation for self-attention block/non-local block + Input: + N X C X H X W + Parameters: + in_channels : the dimension of the input feature map + key_channels : the dimension after the key/query transform + value_channels : the dimension after the value transform + scale : choose the scale to downsample the input feature maps (save memory cost) + Return: + N X C X H X W + position-aware context features.(w/o concate or add with the input) + ''' + + def __init__(self, in_channels, key_channels, value_channels, out_channels=None, scale=1): + super(_SelfAttentionBlock, self).__init__() + self.scale = scale + self.in_channels = in_channels + self.out_channels = out_channels + self.key_channels = key_channels + self.value_channels = value_channels + if out_channels == None: + self.out_channels = in_channels + self.pool = nn.MaxPool2d(kernel_size=(scale, scale)) + self.f_key = nn.Sequential( + nn.Conv2d(in_channels=self.in_channels, out_channels=self.key_channels, + kernel_size=1, stride=1, padding=0), + InPlaceABNSync(self.key_channels), + ) + self.f_query = self.f_key + self.f_value = nn.Conv2d(in_channels=self.in_channels, out_channels=self.value_channels, + kernel_size=1, stride=1, padding=0) + self.W = nn.Conv2d(in_channels=self.value_channels, out_channels=self.out_channels, + kernel_size=1, stride=1, padding=0) + nn.init.constant(self.W.weight, 0) + nn.init.constant(self.W.bias, 0) + + def forward(self, x): + batch_size, h, w = x.size(0), x.size(2), x.size(3) + if self.scale > 1: + x = self.pool(x) + + value = self.f_value(x).view(batch_size, self.value_channels, -1) + value = value.permute(0, 2, 1) + query = self.f_query(x).view(batch_size, self.key_channels, -1) + query = query.permute(0, 2, 1) + key = self.f_key(x).view(batch_size, self.key_channels, -1) + + sim_map = torch.matmul(query, key) + sim_map = (self.key_channels ** -.5) * sim_map + sim_map = F.softmax(sim_map, dim=-1) + + context = torch.matmul(sim_map, value) + context = context.permute(0, 2, 1).contiguous() + context = context.view(batch_size, self.value_channels, *x.size()[2:]) + context = self.W(context) + if self.scale > 1: + context = F.upsample(input=context, size=(h, w), mode='bilinear', align_corners=True) + return context + + +class SelfAttentionBlock2D(_SelfAttentionBlock): + def __init__(self, in_channels, key_channels, value_channels, out_channels=None, scale=1): + super(SelfAttentionBlock2D, self).__init__(in_channels, + key_channels, + value_channels, + out_channels, + scale) + + +class BaseOC_Module(nn.Module): + """ + Implementation of the BaseOC module + Parameters: + in_features / out_features: the channels of the input / output feature maps. + dropout: we choose 0.05 as the default value. + size: you can apply multiple sizes. Here we only use one size. + Return: + features fused with Object context information. 
+ """ + + def __init__(self, in_channels, out_channels, key_channels, value_channels, dropout, sizes=([1])): + super(BaseOC_Module, self).__init__() + self.stages = [] + self.stages = nn.ModuleList( + [self._make_stage(in_channels, out_channels, key_channels, value_channels, size) for size in sizes]) + self.conv_bn_dropout = nn.Sequential( + nn.Conv2d(2 * in_channels, out_channels, kernel_size=1, padding=0), + InPlaceABNSync(out_channels), + nn.Dropout2d(dropout) + ) + + def _make_stage(self, in_channels, output_channels, key_channels, value_channels, size): + return SelfAttentionBlock2D(in_channels, + key_channels, + value_channels, + output_channels, + size) + + def forward(self, feats): + priors = [stage(feats) for stage in self.stages] + context = priors[0] + for i in range(1, len(priors)): + context += priors[i] + output = self.conv_bn_dropout(torch.cat([context, feats], 1)) + return output + + +class BaseOC_Context_Module(nn.Module): + """ + Output only the context features. + Parameters: + in_features / out_features: the channels of the input / output feature maps. + dropout: specify the dropout ratio + fusion: We provide two different fusion method, "concat" or "add" + size: we find that directly learn the attention weights on even 1/8 feature maps is hard. + Return: + features after "concat" or "add" + """ + + def __init__(self, in_channels, out_channels, key_channels, value_channels, dropout, sizes=([1])): + super(BaseOC_Context_Module, self).__init__() + self.stages = [] + self.stages = nn.ModuleList( + [self._make_stage(in_channels, out_channels, key_channels, value_channels, size) for size in sizes]) + self.conv_bn_dropout = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=1, padding=0), + InPlaceABNSync(out_channels), + ) + + def _make_stage(self, in_channels, output_channels, key_channels, value_channels, size): + return SelfAttentionBlock2D(in_channels, + key_channels, + value_channels, + output_channels, + size) + + def forward(self, feats): + priors = [stage(feats) for stage in self.stages] + context = priors[0] + for i in range(1, len(priors)): + context += priors[i] + output = self.conv_bn_dropout(context) + return output + + +class ASP_OC_Module(nn.Module): + def __init__(self, features, out_features=256, dilations=(12, 24, 36)): + super(ASP_OC_Module, self).__init__() + self.context = nn.Sequential(nn.Conv2d(features, out_features, kernel_size=3, padding=1, dilation=1, bias=True), + InPlaceABNSync(out_features), + BaseOC_Context_Module(in_channels=out_features, out_channels=out_features, + key_channels=out_features // 2, value_channels=out_features, + dropout=0, sizes=([2]))) + self.conv2 = nn.Sequential(nn.Conv2d(features, out_features, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(out_features)) + self.conv3 = nn.Sequential( + nn.Conv2d(features, out_features, kernel_size=3, padding=dilations[0], dilation=dilations[0], bias=False), + InPlaceABNSync(out_features)) + self.conv4 = nn.Sequential( + nn.Conv2d(features, out_features, kernel_size=3, padding=dilations[1], dilation=dilations[1], bias=False), + InPlaceABNSync(out_features)) + self.conv5 = nn.Sequential( + nn.Conv2d(features, out_features, kernel_size=3, padding=dilations[2], dilation=dilations[2], bias=False), + InPlaceABNSync(out_features)) + + self.conv_bn_dropout = nn.Sequential( + nn.Conv2d(out_features * 5, out_features, kernel_size=1, padding=0, dilation=1, bias=False), + InPlaceABNSync(out_features), + nn.Dropout2d(0.1) + ) + + def _cat_each(self, feat1, feat2, 
feat3, feat4, feat5): + assert (len(feat1) == len(feat2)) + z = [] + for i in range(len(feat1)): + z.append(torch.cat((feat1[i], feat2[i], feat3[i], feat4[i], feat5[i]), 1)) + return z + + def forward(self, x): + if isinstance(x, Variable): + _, _, h, w = x.size() + elif isinstance(x, tuple) or isinstance(x, list): + _, _, h, w = x[0].size() + else: + raise RuntimeError('unknown input type') + + feat1 = self.context(x) + feat2 = self.conv2(x) + feat3 = self.conv3(x) + feat4 = self.conv4(x) + feat5 = self.conv5(x) + + if isinstance(x, Variable): + out = torch.cat((feat1, feat2, feat3, feat4, feat5), 1) + elif isinstance(x, tuple) or isinstance(x, list): + out = self._cat_each(feat1, feat2, feat3, feat4, feat5) + else: + raise RuntimeError('unknown input type') + output = self.conv_bn_dropout(out) + return output diff --git a/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/psp.py b/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/psp.py new file mode 100644 index 0000000000000000000000000000000000000000..47181dc3f5fddb1c7fb80ad58a6694aae9ebd746 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/networks/context_encoding/psp.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : psp.py +@Time : 8/4/19 3:36 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import torch +import torch.nn as nn +from torch.nn import functional as F + +from modules import InPlaceABNSync + + +class PSPModule(nn.Module): + """ + Reference: + Zhao, Hengshuang, et al. *"Pyramid scene parsing network."* + """ + def __init__(self, features, out_features=512, sizes=(1, 2, 3, 6)): + super(PSPModule, self).__init__() + + self.stages = [] + self.stages = nn.ModuleList([self._make_stage(features, out_features, size) for size in sizes]) + self.bottleneck = nn.Sequential( + nn.Conv2d(features + len(sizes) * out_features, out_features, kernel_size=3, padding=1, dilation=1, + bias=False), + InPlaceABNSync(out_features), + ) + + def _make_stage(self, features, out_features, size): + prior = nn.AdaptiveAvgPool2d(output_size=(size, size)) + conv = nn.Conv2d(features, out_features, kernel_size=1, bias=False) + bn = InPlaceABNSync(out_features) + return nn.Sequential(prior, conv, bn) + + def forward(self, feats): + h, w = feats.size(2), feats.size(3) + priors = [F.interpolate(input=stage(feats), size=(h, w), mode='bilinear', align_corners=True) for stage in + self.stages] + [feats] + bottle = self.bottleneck(torch.cat(priors, 1)) + return bottle \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/requirements.txt b/Self-Correction-Human-Parsing-for-ACGPN/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..fa113c8904cc69b76694a0a666de0fd895619770 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/requirements.txt @@ -0,0 +1 @@ +opencv-python==4.4.0.46 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/simple_extractor.py b/Self-Correction-Human-Parsing-for-ACGPN/simple_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..10e1cc0c4934342b4b9ad77ea98131308c0e237b --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/simple_extractor.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : simple_extractor.py +@Time : 8/30/19 
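The PSPModule above implements pyramid pooling: the input features are adaptively average-pooled to 1x1, 2x2, 3x3 and 6x6 grids, each pooled map is projected to `out_features` channels and bilinearly upsampled back, and the four context maps are concatenated with the original features before a 3x3 bottleneck. A hedged shape check (the import path and the InPlaceABNSync requirement are assumptions):

```python
import torch
from networks.context_encoding.psp import PSPModule  # assumed import path

psp = PSPModule(features=2048, out_features=512).cuda().eval()
with torch.no_grad():
    y = psp(torch.randn(1, 2048, 30, 30, device='cuda'))
print(y.shape)  # torch.Size([1, 512, 30, 30]); cat of 2048 + 4 * 512 channels -> 3x3 bottleneck -> 512
```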
8:59 PM +@Desc : Simple Extractor +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import os +import torch +import argparse +import numpy as np +from PIL import Image +from tqdm import tqdm + +from torch.utils.data import DataLoader +import torchvision.transforms as transforms + +import networks +from utils.transforms import transform_logits +from datasets.simple_extractor_dataset import SimpleFolderDataset + +dataset_settings = { + 'lip': { + 'input_size': [473, 473], + 'num_classes': 20, + 'label': ['Background', 'Hat', 'Hair', 'Glove', 'Sunglasses', 'Upper-clothes', 'Dress', 'Coat', + 'Socks', 'Pants', 'Jumpsuits', 'Scarf', 'Skirt', 'Face', 'Left-arm', 'Right-arm', + 'Left-leg', 'Right-leg', 'Left-shoe', 'Right-shoe'] + }, + 'atr': { + 'input_size': [512, 512], + 'num_classes': 18, + 'label': ['Background', 'Hat', 'Hair', 'Sunglasses', 'Upper-clothes', 'Skirt', 'Pants', 'Dress', 'Belt', + 'Left-shoe', 'Right-shoe', 'Face', 'Left-leg', 'Right-leg', 'Left-arm', 'Right-arm', 'Bag', 'Scarf'] + }, + 'pascal': { + 'input_size': [512, 512], + 'num_classes': 7, + 'label': ['Background', 'Head', 'Torso', 'Upper Arms', 'Lower Arms', 'Upper Legs', 'Lower Legs'], + } +} + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + Returns: + A list of parsed arguments. + """ + parser = argparse.ArgumentParser(description="Self Correction for Human Parsing") + + parser.add_argument("--dataset", type=str, default='lip', choices=['lip', 'atr', 'pascal']) + parser.add_argument("--model-restore", type=str, default='', help="restore pretrained model parameters.") + parser.add_argument("--gpu", type=str, default='0', help="choose gpu device.") + parser.add_argument("--input-dir", type=str, default='', help="path of input image folder.") + parser.add_argument("--output-dir", type=str, default='', help="path of output image folder.") + parser.add_argument("--logits", action='store_true', default=False, help="whether to save the logits.") + + return parser.parse_args() + + +def get_palette(num_cls): + """ Returns the color map for visualizing the segmentation mask. 
+ Args: + num_cls: Number of classes + Returns: + The color map + """ + n = num_cls + palette = [0] * (n * 3) + for j in range(0, n): + lab = j + palette[j * 3 + 0] = 0 + palette[j * 3 + 1] = 0 + palette[j * 3 + 2] = 0 + i = 0 + while lab: + palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) + palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) + palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) + i += 1 + lab >>= 3 + return palette + + +def main(): + args = get_arguments() + + gpus = [int(i) for i in args.gpu.split(',')] + assert len(gpus) == 1 + if not args.gpu == 'None': + os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu + + num_classes = dataset_settings[args.dataset]['num_classes'] + #num_classes = 14 for ACGPN + input_size = dataset_settings[args.dataset]['input_size'] + label = dataset_settings[args.dataset]['label'] + #print("Evaluating total class number {} with {}".format(num_classes, label)) + + model = networks.init_model('resnet101', num_classes=num_classes, pretrained=None) + + state_dict = torch.load(args.model_restore)['state_dict'] + from collections import OrderedDict + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + name = k[7:] # remove `module.` + new_state_dict[name] = v + model.load_state_dict(new_state_dict) + model.cuda() + model.eval() + + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(mean=[0.406, 0.456, 0.485], std=[0.225, 0.224, 0.229]) + ]) + + trans_dict = { + 0:0, + 1:1, 2:1, + 5:4, 6:4, 7:4, + 18:5, + 19:6, + 9:8, 12:8, + 16:9, + 17:10, + 14:11, + 4:12, 13:12, + 15:13 + } + dataset = SimpleFolderDataset(root=args.input_dir, input_size=input_size, transform=transform) + dataloader = DataLoader(dataset) + + if not os.path.exists(args.output_dir): + os.makedirs(args.output_dir) + + #palette = get_palette(14) + with torch.no_grad(): + for idx, batch in enumerate(tqdm(dataloader)): + image, meta = batch + img_name = meta['name'][0] + c = meta['center'].numpy()[0] + s = meta['scale'].numpy()[0] + w = meta['width'].numpy()[0] + h = meta['height'].numpy()[0] + + output = model(image.cuda()) + upsample = torch.nn.Upsample(size=input_size, mode='bicubic', align_corners=True) + upsample_output = upsample(output[0][-1][0].unsqueeze(0)) + upsample_output = upsample_output.squeeze() + upsample_output = upsample_output.permute(1, 2, 0) # CHW -> HWC + + logits_result = transform_logits(upsample_output.data.cpu().numpy(), c, s, w, h, input_size=input_size) + parsing_result = np.argmax(logits_result, axis=2) + parsing_result_path = os.path.join(args.output_dir, img_name[:-4] + '.png') + output_arr = np.asarray(parsing_result, dtype=np.uint8) + + new_arr = np.full(output_arr.shape, 7) + for old, new in trans_dict.items(): + new_arr = np.where(output_arr == old, new, new_arr) + output_img = Image.fromarray(np.asarray(new_arr, dtype=np.uint8)) + #output_img.putpalette(palette) + output_img.save(parsing_result_path) + if args.logits: + logits_result_path = os.path.join(args.output_dir, img_name[:-4] + '.npy') + np.save(logits_result_path, logits_result) + return + + +if __name__ == '__main__': + main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/simple_extractor_sievenet.py b/Self-Correction-Human-Parsing-for-ACGPN/simple_extractor_sievenet.py new file mode 100644 index 0000000000000000000000000000000000000000..286ca843aa92e1fab423760b8f9766272285bcc1 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/simple_extractor_sievenet.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" 
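In `main()` above, the 20 LIP classes are remapped to the smaller label set that ACGPN consumes: `new_arr` starts filled with 7 and each `np.where` call rewrites one source class according to `trans_dict`. The loop is equivalent to indexing a 20-entry lookup table once; the sketch below is only an illustration and reuses the dictionary from the script:

```python
import numpy as np

trans_dict = {0: 0, 1: 1, 2: 1, 5: 4, 6: 4, 7: 4, 18: 5, 19: 6, 9: 8, 12: 8,
              16: 9, 17: 10, 14: 11, 4: 12, 13: 12, 15: 13}

lut = np.full(20, 7, dtype=np.uint8)      # unlisted LIP classes all collapse to label 7
for old, new in trans_dict.items():
    lut[old] = new

parsing_result = np.random.randint(0, 20, size=(473, 473))  # stand-in for the argmax output
new_arr = lut[parsing_result]             # same result as the chain of np.where calls
```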
+@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : simple_extractor.py +@Time : 8/30/19 8:59 PM +@Desc : Simple Extractor +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import os +import torch +import argparse +import numpy as np +from PIL import Image +from tqdm import tqdm + +from torch.utils.data import DataLoader +import torchvision.transforms as transforms + +import networks +from utils.transforms import transform_logits +from datasets.simple_extractor_dataset import SimpleFolderDataset + +dataset_settings = { + 'lip': { + 'input_size': [473, 473], + 'num_classes': 20, + 'label': ['Background', 'Hat', 'Hair', 'Glove', 'Sunglasses', 'Upper-clothes', 'Dress', 'Coat', + 'Socks', 'Pants', 'Jumpsuits', 'Scarf', 'Skirt', 'Face', 'Left-arm', 'Right-arm', + 'Left-leg', 'Right-leg', 'Left-shoe', 'Right-shoe'] + }, + 'atr': { + 'input_size': [512, 512], + 'num_classes': 18, + 'label': ['Background', 'Hat', 'Hair', 'Sunglasses', 'Upper-clothes', 'Skirt', 'Pants', 'Dress', 'Belt', + 'Left-shoe', 'Right-shoe', 'Face', 'Left-leg', 'Right-leg', 'Left-arm', 'Right-arm', 'Bag', 'Scarf'] + }, + 'pascal': { + 'input_size': [512, 512], + 'num_classes': 7, + 'label': ['Background', 'Head', 'Torso', 'Upper Arms', 'Lower Arms', 'Upper Legs', 'Lower Legs'], + } +} + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + Returns: + A list of parsed arguments. + """ + parser = argparse.ArgumentParser(description="Self Correction for Human Parsing") + + parser.add_argument("--dataset", type=str, default='lip', choices=['lip', 'atr', 'pascal']) + parser.add_argument("--model-restore", type=str, default='', help="restore pretrained model parameters.") + parser.add_argument("--gpu", type=str, default='0', help="choose gpu device.") + parser.add_argument("--input-dir", type=str, default='', help="path of input image folder.") + parser.add_argument("--output-dir", type=str, default='', help="path of output image folder.") + parser.add_argument("--logits", action='store_true', default=False, help="whether to save the logits.") + + return parser.parse_args() + + +def get_palette(num_cls): + """ Returns the color map for visualizing the segmentation mask. 
+ Args: + num_cls: Number of classes + Returns: + The color map + """ + n = num_cls + palette = [0] * (n * 3) + for j in range(0, n): + lab = j + palette[j * 3 + 0] = 0 + palette[j * 3 + 1] = 0 + palette[j * 3 + 2] = 0 + i = 0 + while lab: + palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) + palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) + palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) + i += 1 + lab >>= 3 + return palette + + +def main(): + args = get_arguments() + + gpus = [int(i) for i in args.gpu.split(',')] + assert len(gpus) == 1 + if not args.gpu == 'None': + os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu + + num_classes = dataset_settings[args.dataset]['num_classes'] + input_size = dataset_settings[args.dataset]['input_size'] + label = dataset_settings[args.dataset]['label'] + print("Evaluating total class number {} with {}".format(num_classes, label)) + + model = networks.init_model('resnet101', num_classes=num_classes, pretrained=None) + + state_dict = torch.load(args.model_restore)['state_dict'] + from collections import OrderedDict + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + name = k[7:] # remove `module.` + new_state_dict[name] = v + model.load_state_dict(new_state_dict) + model.cuda() + model.eval() + + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(mean=[0.406, 0.456, 0.485], std=[0.225, 0.224, 0.229]) + ]) + dataset = SimpleFolderDataset(root=args.input_dir, input_size=input_size, transform=transform) + dataloader = DataLoader(dataset) + + if not os.path.exists(args.output_dir): + os.makedirs(args.output_dir) + + palette = get_palette(num_classes) + with torch.no_grad(): + for idx, batch in enumerate(tqdm(dataloader)): + image, meta = batch + img_name = meta['name'][0] + c = meta['center'].numpy()[0] + s = meta['scale'].numpy()[0] + w = meta['width'].numpy()[0] + h = meta['height'].numpy()[0] + + output = model(image.cuda()) + upsample = torch.nn.Upsample(size=input_size, mode='bilinear', align_corners=True) + upsample_output = upsample(output[0][-1][0].unsqueeze(0)) + upsample_output = upsample_output.squeeze() + upsample_output = upsample_output.permute(1, 2, 0) # CHW -> HWC + + logits_result = transform_logits(upsample_output.data.cpu().numpy(), c, s, w, h, input_size=input_size) + parsing_result = np.argmax(logits_result, axis=2) + parsing_result_path = os.path.join(args.output_dir, img_name[:-4] + '.png') + output_img = Image.fromarray(np.asarray(parsing_result, dtype=np.uint8)) + #output_img.putpalette(palette) + output_img.save(parsing_result_path) + if args.logits: + logits_result_path = os.path.join(args.output_dir, img_name[:-4] + '.npy') + np.save(logits_result_path, logits_result) + return + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/train.py b/Self-Correction-Human-Parsing-for-ACGPN/train.py new file mode 100644 index 0000000000000000000000000000000000000000..c13c5040dae096a6da9d2d468942a19a5b3a3641 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/train.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : train.py +@Time : 8/4/19 3:36 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
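`get_palette` (defined identically in both extractor scripts) builds the familiar PASCAL-VOC colour map by spreading the bits of each class index across the R/G/B channels, one bit per loop iteration starting from the most significant output bit. A quick check of the first few entries, assuming either script's `get_palette` is in scope:

```python
palette = get_palette(20)
for cls in range(4):
    print(cls, palette[cls * 3: cls * 3 + 3])
# 0 [0, 0, 0]      background stays black
# 1 [128, 0, 0]    dark red
# 2 [0, 128, 0]    dark green
# 3 [128, 128, 0]  dark yellow
```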
+""" + +import os +import json +import timeit +import argparse + +import torch +import torch.optim as optim +import torchvision.transforms as transforms +import torch.backends.cudnn as cudnn +from torch.utils import data + +import networks +import utils.schp as schp +from datasets.datasets import LIPDataSet +from datasets.target_generation import generate_edge_tensor +from utils.transforms import BGR2RGB_transform +from utils.criterion import CriterionAll +from utils.encoding import DataParallelModel, DataParallelCriterion +from utils.warmup_scheduler import SGDRScheduler + + +def get_arguments(): + """Parse all the arguments provided from the CLI. + Returns: + A list of parsed arguments. + """ + parser = argparse.ArgumentParser(description="Self Correction for Human Parsing") + + # Network Structure + parser.add_argument("--arch", type=str, default='resnet101') + # Data Preference + parser.add_argument("--data-dir", type=str, default='./data/LIP') + parser.add_argument("--batch-size", type=int, default=16) + parser.add_argument("--input-size", type=str, default='473,473') + parser.add_argument("--num-classes", type=int, default=20) + parser.add_argument("--ignore-label", type=int, default=255) + parser.add_argument("--random-mirror", action="store_true") + parser.add_argument("--random-scale", action="store_true") + # Training Strategy + parser.add_argument("--learning-rate", type=float, default=7e-3) + parser.add_argument("--momentum", type=float, default=0.9) + parser.add_argument("--weight-decay", type=float, default=5e-4) + parser.add_argument("--gpu", type=str, default='0,1,2') + parser.add_argument("--start-epoch", type=int, default=0) + parser.add_argument("--epochs", type=int, default=150) + parser.add_argument("--eval-epochs", type=int, default=10) + parser.add_argument("--imagenet-pretrain", type=str, default='./pretrain_model/resnet101-imagenet.pth') + parser.add_argument("--log-dir", type=str, default='./log') + parser.add_argument("--model-restore", type=str, default='./log/checkpoint.pth.tar') + parser.add_argument("--schp-start", type=int, default=100, help='schp start epoch') + parser.add_argument("--cycle-epochs", type=int, default=10, help='schp cyclical epoch') + parser.add_argument("--schp-restore", type=str, default='./log/schp_checkpoint.pth.tar') + parser.add_argument("--lambda-s", type=float, default=1, help='segmentation loss weight') + parser.add_argument("--lambda-e", type=float, default=1, help='edge loss weight') + parser.add_argument("--lambda-c", type=float, default=0.1, help='segmentation-edge consistency loss weight') + return parser.parse_args() + + +def main(): + args = get_arguments() + print(args) + + start_epoch = 0 + cycle_n = 0 + + if not os.path.exists(args.log_dir): + os.makedirs(args.log_dir) + with open(os.path.join(args.log_dir, 'args.json'), 'w') as opt_file: + json.dump(vars(args), opt_file) + + gpus = [int(i) for i in args.gpu.split(',')] + if not args.gpu == 'None': + os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu + + input_size = list(map(int, args.input_size.split(','))) + + cudnn.enabled = True + cudnn.benchmark = True + + # Model Initialization + AugmentCE2P = networks.init_model(args.arch, num_classes=args.num_classes, pretrained=args.imagenet_pretrain) + model = DataParallelModel(AugmentCE2P) + model.cuda() + + IMAGE_MEAN = AugmentCE2P.mean + IMAGE_STD = AugmentCE2P.std + INPUT_SPACE = AugmentCE2P.input_space + print('image mean: {}'.format(IMAGE_MEAN)) + print('image std: {}'.format(IMAGE_STD)) + print('input 
space:{}'.format(INPUT_SPACE)) + + restore_from = args.model_restore + if os.path.exists(restore_from): + print('Resume training from {}'.format(restore_from)) + checkpoint = torch.load(restore_from) + model.load_state_dict(checkpoint['state_dict']) + start_epoch = checkpoint['epoch'] + + SCHP_AugmentCE2P = networks.init_model(args.arch, num_classes=args.num_classes, pretrained=args.imagenet_pretrain) + schp_model = DataParallelModel(SCHP_AugmentCE2P) + schp_model.cuda() + + if os.path.exists(args.schp_restore): + print('Resuming schp checkpoint from {}'.format(args.schp_restore)) + schp_checkpoint = torch.load(args.schp_restore) + schp_model_state_dict = schp_checkpoint['state_dict'] + cycle_n = schp_checkpoint['cycle_n'] + schp_model.load_state_dict(schp_model_state_dict) + + # Loss Function + criterion = CriterionAll(lambda_1=args.lambda_s, lambda_2=args.lambda_e, lambda_3=args.lambda_c, + num_classes=args.num_classes) + criterion = DataParallelCriterion(criterion) + criterion.cuda() + + # Data Loader + if INPUT_SPACE == 'BGR': + print('BGR Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + ]) + + elif INPUT_SPACE == 'RGB': + print('RGB Transformation') + transform = transforms.Compose([ + transforms.ToTensor(), + BGR2RGB_transform(), + transforms.Normalize(mean=IMAGE_MEAN, + std=IMAGE_STD), + ]) + + train_dataset = LIPDataSet(args.data_dir, 'train', crop_size=input_size, transform=transform) + train_loader = data.DataLoader(train_dataset, batch_size=args.batch_size * len(gpus), + num_workers=16, shuffle=True, pin_memory=True, drop_last=True) + print('Total training samples: {}'.format(len(train_dataset))) + + # Optimizer Initialization + optimizer = optim.SGD(model.parameters(), lr=args.learning_rate, momentum=args.momentum, + weight_decay=args.weight_decay) + + lr_scheduler = SGDRScheduler(optimizer, total_epoch=args.epochs, + eta_min=args.learning_rate / 100, warmup_epoch=10, + start_cyclical=args.schp_start, cyclical_base_lr=args.learning_rate / 2, + cyclical_epoch=args.cycle_epochs) + + total_iters = args.epochs * len(train_loader) + start = timeit.default_timer() + for epoch in range(start_epoch, args.epochs): + lr_scheduler.step(epoch=epoch) + lr = lr_scheduler.get_lr()[0] + + model.train() + for i_iter, batch in enumerate(train_loader): + i_iter += len(train_loader) * epoch + + images, labels, _ = batch + labels = labels.cuda(non_blocking=True) + + edges = generate_edge_tensor(labels) + labels = labels.type(torch.cuda.LongTensor) + edges = edges.type(torch.cuda.LongTensor) + + preds = model(images) + + # Online Self Correction Cycle with Label Refinement + if cycle_n >= 1: + with torch.no_grad(): + soft_preds = schp_model(images) + soft_parsing = [] + soft_edge = [] + for soft_pred in soft_preds: + soft_parsing.append(soft_pred[0][-1]) + soft_edge.append(soft_pred[1][-1]) + soft_preds = torch.cat(soft_parsing, dim=0) + soft_edges = torch.cat(soft_edge, dim=0) + else: + soft_preds = None + soft_edges = None + + loss = criterion(preds, [labels, edges, soft_preds, soft_edges], cycle_n) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + if i_iter % 100 == 0: + print('iter = {} of {} completed, lr = {}, loss = {}'.format(i_iter, total_iters, lr, + loss.data.cpu().numpy())) + if (epoch + 1) % (args.eval_epochs) == 0: + schp.save_schp_checkpoint({ + 'epoch': epoch + 1, + 'state_dict': model.state_dict(), + }, False, args.log_dir, filename='checkpoint_{}.pth.tar'.format(epoch + 1)) + + 
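The aggregation step that follows is the heart of SCHP's self-correction: once `schp_start` is reached, every `cycle_epochs` epochs the online model is folded into `schp_model` with weight 1/(cycle_n + 1), and the aggregated model's BatchNorm statistics are re-estimated over the training loader. Assuming `utils.schp.moving_average` uses the same (1 - alpha)/alpha convention as the helper of the same name in utils/criterion.py, this keeps `schp_model` equal to the running mean of all cycle-end snapshots:

```python
# Illustrative only -- the real update lives in utils/schp.py (not shown in this diff).
# For every parameter p, with alpha = 1 / (cycle_n + 1):
#   p_schp <- (1 - alpha) * p_schp + alpha * p_model
# cycle 0 copies the online model, cycle 1 averages two snapshots, cycle 2 three, and so on.
def fold_in_snapshot(p_schp, p_model, cycle_n):
    alpha = 1.0 / (cycle_n + 1)
    return (1.0 - alpha) * p_schp + alpha * p_model
```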
# Self Correction Cycle with Model Aggregation + if (epoch + 1) >= args.schp_start and (epoch + 1 - args.schp_start) % args.cycle_epochs == 0: + print('Self-correction cycle number {}'.format(cycle_n)) + schp.moving_average(schp_model, model, 1.0 / (cycle_n + 1)) + cycle_n += 1 + schp.bn_re_estimate(train_loader, schp_model) + schp.save_schp_checkpoint({ + 'state_dict': schp_model.state_dict(), + 'cycle_n': cycle_n, + }, False, args.log_dir, filename='schp_{}_checkpoint.pth.tar'.format(cycle_n)) + + torch.cuda.empty_cache() + end = timeit.default_timer() + print('epoch = {} of {} completed using {} s'.format(epoch, args.epochs, + (end - start) / (epoch - start_epoch + 1))) + + end = timeit.default_timer() + print('Training Finished in {} seconds'.format(end - start)) + + +if __name__ == '__main__': + main() diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/__init__.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/consistency_loss.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/consistency_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..b872fdcc10ecef02762399278191e48e79ea9a1f --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/consistency_loss.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : kl_loss.py +@Time : 7/23/19 4:02 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" +import torch +import torch.nn.functional as F +from torch import nn +from datasets.target_generation import generate_edge_tensor + + +class ConsistencyLoss(nn.Module): + def __init__(self, ignore_index=255): + super(ConsistencyLoss, self).__init__() + self.ignore_index=ignore_index + + def forward(self, parsing, edge, label): + parsing_pre = torch.argmax(parsing, dim=1) + parsing_pre[label==self.ignore_index]=self.ignore_index + generated_edge = generate_edge_tensor(parsing_pre) + edge_pre = torch.argmax(edge, dim=1) + v_generate_edge = generated_edge[label!=255] + v_edge_pre = edge_pre[label!=255] + v_edge_pre = v_edge_pre.type(torch.cuda.FloatTensor) + positive_union = (v_generate_edge==1)&(v_edge_pre==1) # only the positive values count + return F.smooth_l1_loss(v_generate_edge[positive_union].squeeze(0), v_edge_pre[positive_union].squeeze(0)) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/criterion.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/criterion.py new file mode 100644 index 0000000000000000000000000000000000000000..968894319042331482692e42804f103074e4b710 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/criterion.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : criterion.py +@Time : 8/30/19 8:59 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. 
+""" + +import torch.nn as nn +import torch +import numpy as np +from torch.nn import functional as F +from .lovasz_softmax import LovaszSoftmax +from .kl_loss import KLDivergenceLoss +from .consistency_loss import ConsistencyLoss + +NUM_CLASSES = 20 + + +class CriterionAll(nn.Module): + def __init__(self, use_class_weight=False, ignore_index=255, lambda_1=1, lambda_2=1, lambda_3=1, + num_classes=20): + super(CriterionAll, self).__init__() + self.ignore_index = ignore_index + self.use_class_weight = use_class_weight + self.criterion = torch.nn.CrossEntropyLoss(ignore_index=ignore_index) + self.lovasz = LovaszSoftmax(ignore_index=ignore_index) + self.kldiv = KLDivergenceLoss(ignore_index=ignore_index) + self.reg = ConsistencyLoss(ignore_index=ignore_index) + self.lamda_1 = lambda_1 + self.lamda_2 = lambda_2 + self.lamda_3 = lambda_3 + self.num_classes = num_classes + + def parsing_loss(self, preds, target, cycle_n=None): + """ + Loss function definition. + + Args: + preds: [[parsing result1, parsing result2],[edge result]] + target: [parsing label, egde label] + soft_preds: [[parsing result1, parsing result2],[edge result]] + Returns: + Calculated Loss. + """ + h, w = target[0].size(1), target[0].size(2) + + pos_num = torch.sum(target[1] == 1, dtype=torch.float) + neg_num = torch.sum(target[1] == 0, dtype=torch.float) + + weight_pos = neg_num / (pos_num + neg_num) + weight_neg = pos_num / (pos_num + neg_num) + weights = torch.tensor([weight_neg, weight_pos]) # edge loss weight + + loss = 0 + + # loss for segmentation + preds_parsing = preds[0] + for pred_parsing in preds_parsing: + scale_pred = F.interpolate(input=pred_parsing, size=(h, w), + mode='bilinear', align_corners=True) + + loss += 0.5 * self.lamda_1 * self.lovasz(scale_pred, target[0]) + if target[2] is None: + loss += 0.5 * self.lamda_1 * self.criterion(scale_pred, target[0]) + else: + soft_scale_pred = F.interpolate(input=target[2], size=(h, w), + mode='bilinear', align_corners=True) + soft_scale_pred = moving_average(soft_scale_pred, to_one_hot(target[0], num_cls=self.num_classes), + 1.0 / (cycle_n + 1.0)) + loss += 0.5 * self.lamda_1 * self.kldiv(scale_pred, soft_scale_pred, target[0]) + + # loss for edge + preds_edge = preds[1] + for pred_edge in preds_edge: + scale_pred = F.interpolate(input=pred_edge, size=(h, w), + mode='bilinear', align_corners=True) + if target[3] is None: + loss += self.lamda_2 * F.cross_entropy(scale_pred, target[1], + weights.cuda(), ignore_index=self.ignore_index) + else: + soft_scale_edge = F.interpolate(input=target[3], size=(h, w), + mode='bilinear', align_corners=True) + soft_scale_edge = moving_average(soft_scale_edge, to_one_hot(target[1], num_cls=2), + 1.0 / (cycle_n + 1.0)) + loss += self.lamda_2 * self.kldiv(scale_pred, soft_scale_edge, target[0]) + + # consistency regularization + preds_parsing = preds[0] + preds_edge = preds[1] + for pred_parsing in preds_parsing: + scale_pred = F.interpolate(input=pred_parsing, size=(h, w), + mode='bilinear', align_corners=True) + scale_edge = F.interpolate(input=preds_edge[0], size=(h, w), + mode='bilinear', align_corners=True) + loss += self.lamda_3 * self.reg(scale_pred, scale_edge, target[0]) + + return loss + + def forward(self, preds, target, cycle_n=None): + loss = self.parsing_loss(preds, target, cycle_n) + return loss + + def _generate_weights(self, masks, num_classes): + """ + masks: torch.Tensor with shape [B, H, W] + """ + masks_label = masks.data.cpu().numpy().astype(np.int64) + pixel_nums = [] + tot_pixels = 0 + for i in range(num_classes): 
+            pixel_num_of_cls_i = np.sum(masks_label == i).astype(float)
+            pixel_nums.append(pixel_num_of_cls_i)
+            tot_pixels += pixel_num_of_cls_i
+        weights = []
+        for i in range(num_classes):
+            weights.append(
+                (tot_pixels - pixel_nums[i]) / tot_pixels / (num_classes - 1)
+            )
+        weights = np.array(weights, dtype=float)
+        # weights = torch.from_numpy(weights).float().to(masks.device)
+        return weights
+
+
+def moving_average(target1, target2, alpha=1.0):
+    target = 0
+    target += (1.0 - alpha) * target1
+    target += target2 * alpha
+    return target
+
+
+def to_one_hot(tensor, num_cls, dim=1, ignore_index=255):
+    b, h, w = tensor.shape
+    tensor[tensor == ignore_index] = 0
+    onehot_tensor = torch.zeros(b, num_cls, h, w).cuda()
+    onehot_tensor.scatter_(dim, tensor.unsqueeze(dim), 1)
+    return onehot_tensor
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/encoding.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/encoding.py
new file mode 100644
index 0000000000000000000000000000000000000000..e8654706c345e8a13219f2c8e4cfa7700f531612
--- /dev/null
+++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/encoding.py
@@ -0,0 +1,188 @@
+##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+## Created by: Hang Zhang
+## ECE Department, Rutgers University
+## Email: zhang.hang@rutgers.edu
+## Copyright (c) 2017
+##
+## This source code is licensed under the MIT-style license found in the
+## LICENSE file in the root directory of this source tree
+##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+"""Encoding Data Parallel"""
+import threading
+import functools
+import torch
+from torch.autograd import Variable, Function
+import torch.cuda.comm as comm
+from torch.nn.parallel.data_parallel import DataParallel
+from torch.nn.parallel.parallel_apply import get_a_var
+from torch.nn.parallel._functions import ReduceAddCoalesced, Broadcast
+
+torch_ver = torch.__version__[:3]
+
+__all__ = ['allreduce', 'DataParallelModel', 'DataParallelCriterion', 'patch_replication_callback']
+
+def allreduce(*inputs):
+    """Cross GPU all reduce autograd operation for calculating the mean and
+    variance in SyncBN.
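In `CriterionAll.parsing_loss` above, once at least one self-correction cycle has finished (`cycle_n >= 1`), the hard annotation is replaced by a refined soft target: the aggregated model's interpolated output is blended with the one-hot ground truth via `moving_average`, and the student is trained against the blend through `KLDivergenceLoss` (which applies a temperature T and rescales the loss by T^2; T defaults to 1). With alpha = 1/(cycle_n + 1), later cycles lean further toward the aggregated prediction and less toward the raw label. A short sketch of the blend using the helpers defined just above:

```python
# refined = (1 - alpha) * schp_output + alpha * one_hot(ground_truth),  alpha = 1 / (cycle_n + 1)
def refine_soft_target(schp_output, label, num_cls, cycle_n):
    one_hot = to_one_hot(label.clone(), num_cls=num_cls)   # (B, C, H, W); ignore pixels are mapped to class 0
    return moving_average(schp_output, one_hot, 1.0 / (cycle_n + 1.0))
```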
+ """ + return AllReduce.apply(*inputs) + +class AllReduce(Function): + @staticmethod + def forward(ctx, num_inputs, *inputs): + ctx.num_inputs = num_inputs + ctx.target_gpus = [inputs[i].get_device() for i in range(0, len(inputs), num_inputs)] + inputs = [inputs[i:i + num_inputs] + for i in range(0, len(inputs), num_inputs)] + # sort before reduce sum + inputs = sorted(inputs, key=lambda i: i[0].get_device()) + results = comm.reduce_add_coalesced(inputs, ctx.target_gpus[0]) + outputs = comm.broadcast_coalesced(results, ctx.target_gpus) + return tuple([t for tensors in outputs for t in tensors]) + + @staticmethod + def backward(ctx, *inputs): + inputs = [i.data for i in inputs] + inputs = [inputs[i:i + ctx.num_inputs] + for i in range(0, len(inputs), ctx.num_inputs)] + results = comm.reduce_add_coalesced(inputs, ctx.target_gpus[0]) + outputs = comm.broadcast_coalesced(results, ctx.target_gpus) + return (None,) + tuple([Variable(t) for tensors in outputs for t in tensors]) + +class Reduce(Function): + @staticmethod + def forward(ctx, *inputs): + ctx.target_gpus = [inputs[i].get_device() for i in range(len(inputs))] + inputs = sorted(inputs, key=lambda i: i.get_device()) + return comm.reduce_add(inputs) + + @staticmethod + def backward(ctx, gradOutput): + return Broadcast.apply(ctx.target_gpus, gradOutput) + + +class DataParallelModel(DataParallel): + """Implements data parallelism at the module level. + + This container parallelizes the application of the given module by + splitting the input across the specified devices by chunking in the + batch dimension. + In the forward pass, the module is replicated on each device, + and each replica handles a portion of the input. During the backwards pass, gradients from each replica are summed into the original module. + Note that the outputs are not gathered, please use compatible + :class:`encoding.parallel.DataParallelCriterion`. + + The batch size should be larger than the number of GPUs used. It should + also be an integer multiple of the number of GPUs so that each chunk is + the same size (so that each GPU processes the same number of samples). + + Args: + module: module to be parallelized + device_ids: CUDA devices (default: all devices) + + Reference: + Hang Zhang, Kristin Dana, Jianping Shi, Zhongyue Zhang, Xiaogang Wang, Ambrish Tyagi, + Amit Agrawal. “Context Encoding for Semantic Segmentation. + *The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2018* + + Example:: + + >>> net = encoding.nn.DataParallelModel(model, device_ids=[0, 1, 2]) + >>> y = net(x) + """ + def gather(self, outputs, output_device): + return outputs + + def replicate(self, module, device_ids): + modules = super(DataParallelModel, self).replicate(module, device_ids) + return modules + + +class DataParallelCriterion(DataParallel): + """ + Calculate loss in multiple-GPUs, which balance the memory usage for + Semantic Segmentation. + + The targets are splitted across the specified devices by chunking in + the batch dimension. Please use together with :class:`encoding.parallel.DataParallelModel`. + + Reference: + Hang Zhang, Kristin Dana, Jianping Shi, Zhongyue Zhang, Xiaogang Wang, Ambrish Tyagi, + Amit Agrawal. “Context Encoding for Semantic Segmentation. 
+    *The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2018*
+
+    Example::
+
+        >>> net = encoding.nn.DataParallelModel(model, device_ids=[0, 1, 2])
+        >>> criterion = encoding.nn.DataParallelCriterion(criterion, device_ids=[0, 1, 2])
+        >>> y = net(x)
+        >>> loss = criterion(y, target)
+    """
+    def forward(self, inputs, *targets, **kwargs):
+        # input should be already scattered
+        # scattering the targets instead
+        if not self.device_ids:
+            return self.module(inputs, *targets, **kwargs)
+        targets, kwargs = self.scatter(targets, kwargs, self.device_ids)
+        if len(self.device_ids) == 1:
+            return self.module(inputs, *targets[0], **kwargs[0])
+        replicas = self.replicate(self.module, self.device_ids[:len(inputs)])
+        outputs = _criterion_parallel_apply(replicas, inputs, targets, kwargs)
+        return Reduce.apply(*outputs) / len(outputs)
+
+
+def _criterion_parallel_apply(modules, inputs, targets, kwargs_tup=None, devices=None):
+    assert len(modules) == len(inputs)
+    assert len(targets) == len(inputs)
+    if kwargs_tup:
+        assert len(modules) == len(kwargs_tup)
+    else:
+        kwargs_tup = ({},) * len(modules)
+    if devices is not None:
+        assert len(modules) == len(devices)
+    else:
+        devices = [None] * len(modules)
+
+    lock = threading.Lock()
+    results = {}
+    if torch_ver != "0.3":
+        grad_enabled = torch.is_grad_enabled()
+
+    def _worker(i, module, input, target, kwargs, device=None):
+        if torch_ver != "0.3":
+            torch.set_grad_enabled(grad_enabled)
+        if device is None:
+            device = get_a_var(input).get_device()
+        try:
+            if not isinstance(input, tuple):
+                input = (input,)
+            with torch.cuda.device(device):
+                output = module(*(input + target), **kwargs)
+            with lock:
+                results[i] = output
+        except Exception as e:
+            with lock:
+                results[i] = e
+
+    if len(modules) > 1:
+        threads = [threading.Thread(target=_worker,
+                                    args=(i, module, input, target,
+                                          kwargs, device),)
+                   for i, (module, input, target, kwargs, device) in
+                   enumerate(zip(modules, inputs, targets, kwargs_tup, devices))]
+
+        for thread in threads:
+            thread.start()
+        for thread in threads:
+            thread.join()
+    else:
+        _worker(0, modules[0], inputs[0], targets[0], kwargs_tup[0], devices[0])
+
+    outputs = []
+    for i in range(len(inputs)):
+        output = results[i]
+        if isinstance(output, Exception):
+            raise output
+        outputs.append(output)
+    return outputs
diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/kl_loss.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/kl_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a685d945fb852a81324513ae55498857f1a4552
--- /dev/null
+++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/kl_loss.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+"""
+@Author : Peike Li
+@Contact : peike.li@yahoo.com
+@File : kl_loss.py
+@Time : 7/23/19 4:02 PM
+@Desc :
+@License : This source code is licensed under the license found in the
+           LICENSE file in the root directory of this source tree.
+"""
+import torch
+import torch.nn.functional as F
+from torch import nn
+
+
+def flatten_probas(input, target, labels, ignore=255):
+    """
+    Flattens predictions in the batch.
+ """ + B, C, H, W = input.size() + input = input.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C + target = target.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C + labels = labels.view(-1) + if ignore is None: + return input, target + valid = (labels != ignore) + vinput = input[valid.nonzero().squeeze()] + vtarget = target[valid.nonzero().squeeze()] + return vinput, vtarget + + +class KLDivergenceLoss(nn.Module): + def __init__(self, ignore_index=255, T=1): + super(KLDivergenceLoss, self).__init__() + self.ignore_index=ignore_index + self.T = T + + def forward(self, input, target, label): + log_input_prob = F.log_softmax(input / self.T, dim=1) + target_porb = F.softmax(target / self.T, dim=1) + loss = F.kl_div(*flatten_probas(log_input_prob, target_porb, label, ignore=self.ignore_index)) + return self.T*self.T*loss # balanced diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/lovasz_softmax.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/lovasz_softmax.py new file mode 100644 index 0000000000000000000000000000000000000000..b6e444f684c0d9bda9d7c2d54a4e79fac0ddf081 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/lovasz_softmax.py @@ -0,0 +1,279 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : lovasz_softmax.py +@Time : 8/30/19 7:12 PM +@Desc : Lovasz-Softmax and Jaccard hinge loss in PyTorch + Maxim Berman 2018 ESAT-PSI KU Leuven (MIT License) +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +from __future__ import print_function, division + +import torch +from torch.autograd import Variable +import torch.nn.functional as F +import numpy as np +from torch import nn + +try: + from itertools import ifilterfalse +except ImportError: # py3k + from itertools import filterfalse as ifilterfalse + + +def lovasz_grad(gt_sorted): + """ + Computes gradient of the Lovasz extension w.r.t sorted errors + See Alg. 1 in paper + """ + p = len(gt_sorted) + gts = gt_sorted.sum() + intersection = gts - gt_sorted.float().cumsum(0) + union = gts + (1 - gt_sorted).float().cumsum(0) + jaccard = 1. 
- intersection / union + if p > 1: # cover 1-pixel case + jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] + return jaccard + + +def iou_binary(preds, labels, EMPTY=1., ignore=None, per_image=True): + """ + IoU for foreground class + binary: 1 foreground, 0 background + """ + if not per_image: + preds, labels = (preds,), (labels,) + ious = [] + for pred, label in zip(preds, labels): + intersection = ((label == 1) & (pred == 1)).sum() + union = ((label == 1) | ((pred == 1) & (label != ignore))).sum() + if not union: + iou = EMPTY + else: + iou = float(intersection) / float(union) + ious.append(iou) + iou = mean(ious) # mean accross images if per_image + return 100 * iou + + +def iou(preds, labels, C, EMPTY=1., ignore=None, per_image=False): + """ + Array of IoU for each (non ignored) class + """ + if not per_image: + preds, labels = (preds,), (labels,) + ious = [] + for pred, label in zip(preds, labels): + iou = [] + for i in range(C): + if i != ignore: # The ignored label is sometimes among predicted classes (ENet - CityScapes) + intersection = ((label == i) & (pred == i)).sum() + union = ((label == i) | ((pred == i) & (label != ignore))).sum() + if not union: + iou.append(EMPTY) + else: + iou.append(float(intersection) / float(union)) + ious.append(iou) + ious = [mean(iou) for iou in zip(*ious)] # mean accross images if per_image + return 100 * np.array(ious) + + +# --------------------------- BINARY LOSSES --------------------------- + + +def lovasz_hinge(logits, labels, per_image=True, ignore=None): + """ + Binary Lovasz hinge loss + logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty) + labels: [B, H, W] Tensor, binary ground truth masks (0 or 1) + per_image: compute the loss per image instead of per batch + ignore: void class id + """ + if per_image: + loss = mean(lovasz_hinge_flat(*flatten_binary_scores(log.unsqueeze(0), lab.unsqueeze(0), ignore)) + for log, lab in zip(logits, labels)) + else: + loss = lovasz_hinge_flat(*flatten_binary_scores(logits, labels, ignore)) + return loss + + +def lovasz_hinge_flat(logits, labels): + """ + Binary Lovasz hinge loss + logits: [P] Variable, logits at each prediction (between -\infty and +\infty) + labels: [P] Tensor, binary ground truth labels (0 or 1) + ignore: label to ignore + """ + if len(labels) == 0: + # only void pixels, the gradients should be 0 + return logits.sum() * 0. + signs = 2. * labels.float() - 1. + errors = (1. 
- logits * Variable(signs)) + errors_sorted, perm = torch.sort(errors, dim=0, descending=True) + perm = perm.data + gt_sorted = labels[perm] + grad = lovasz_grad(gt_sorted) + loss = torch.dot(F.relu(errors_sorted), Variable(grad)) + return loss + + +def flatten_binary_scores(scores, labels, ignore=None): + """ + Flattens predictions in the batch (binary case) + Remove labels equal to 'ignore' + """ + scores = scores.view(-1) + labels = labels.view(-1) + if ignore is None: + return scores, labels + valid = (labels != ignore) + vscores = scores[valid] + vlabels = labels[valid] + return vscores, vlabels + + +class StableBCELoss(torch.nn.modules.Module): + def __init__(self): + super(StableBCELoss, self).__init__() + + def forward(self, input, target): + neg_abs = - input.abs() + loss = input.clamp(min=0) - input * target + (1 + neg_abs.exp()).log() + return loss.mean() + + +def binary_xloss(logits, labels, ignore=None): + """ + Binary Cross entropy loss + logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty) + labels: [B, H, W] Tensor, binary ground truth masks (0 or 1) + ignore: void class id + """ + logits, labels = flatten_binary_scores(logits, labels, ignore) + loss = StableBCELoss()(logits, Variable(labels.float())) + return loss + + +# --------------------------- MULTICLASS LOSSES --------------------------- + + +def lovasz_softmax(probas, labels, classes='present', per_image=False, ignore=255, weighted=None): + """ + Multi-class Lovasz-Softmax loss + probas: [B, C, H, W] Variable, class probabilities at each prediction (between 0 and 1). + Interpreted as binary (sigmoid) output with outputs of size [B, H, W]. + labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1) + classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average. + per_image: compute the loss per image instead of per batch + ignore: void class labels + """ + if per_image: + loss = mean(lovasz_softmax_flat(*flatten_probas(prob.unsqueeze(0), lab.unsqueeze(0), ignore), classes=classes, weighted=weighted) + for prob, lab in zip(probas, labels)) + else: + loss = lovasz_softmax_flat(*flatten_probas(probas, labels, ignore), classes=classes, weighted=weighted ) + return loss + + +def lovasz_softmax_flat(probas, labels, classes='present', weighted=None): + """ + Multi-class Lovasz-Softmax loss + probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1) + labels: [P] Tensor, ground truth labels (between 0 and C - 1) + classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average. + """ + if probas.numel() == 0: + # only void pixels, the gradients should be 0 + return probas * 0. 
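+    # For each class in turn: build the binary foreground mask, take the absolute
+    # error between that mask and the predicted probability, sort the errors in
+    # descending order, and dot them with the Lovasz gradient of the ground truth
+    # sorted the same way. Per-class losses are then averaged (optionally weighted).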
+ C = probas.size(1) + losses = [] + class_to_sum = list(range(C)) if classes in ['all', 'present'] else classes + for c in class_to_sum: + fg = (labels == c).float() # foreground for class c + if (classes is 'present' and fg.sum() == 0): + continue + if C == 1: + if len(classes) > 1: + raise ValueError('Sigmoid output possible only with 1 class') + class_pred = probas[:, 0] + else: + class_pred = probas[:, c] + errors = (Variable(fg) - class_pred).abs() + errors_sorted, perm = torch.sort(errors, 0, descending=True) + perm = perm.data + fg_sorted = fg[perm] + if weighted is not None: + losses.append(weighted[c]*torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted)))) + else: + losses.append(torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted)))) + return mean(losses) + + +def flatten_probas(probas, labels, ignore=None): + """ + Flattens predictions in the batch + """ + if probas.dim() == 3: + # assumes output of a sigmoid layer + B, H, W = probas.size() + probas = probas.view(B, 1, H, W) + B, C, H, W = probas.size() + probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C + labels = labels.view(-1) + if ignore is None: + return probas, labels + valid = (labels != ignore) + vprobas = probas[valid.nonzero().squeeze()] + vlabels = labels[valid] + return vprobas, vlabels + + +def xloss(logits, labels, ignore=None): + """ + Cross entropy loss + """ + return F.cross_entropy(logits, Variable(labels), ignore_index=255) + + +# --------------------------- HELPER FUNCTIONS --------------------------- +def isnan(x): + return x != x + + +def mean(l, ignore_nan=False, empty=0): + """ + nanmean compatible with generators. + """ + l = iter(l) + if ignore_nan: + l = ifilterfalse(isnan, l) + try: + n = 1 + acc = next(l) + except StopIteration: + if empty == 'raise': + raise ValueError('Empty mean') + return empty + for n, v in enumerate(l, 2): + acc += v + if n == 1: + return acc + return acc / n + +# --------------------------- Class --------------------------- +class LovaszSoftmax(nn.Module): + def __init__(self, per_image=False, ignore_index=255, weighted=None): + super(LovaszSoftmax, self).__init__() + self.lovasz_softmax = lovasz_softmax + self.per_image = per_image + self.ignore_index=ignore_index + self.weighted = weighted + + def forward(self, pred, label): + pred = F.softmax(pred, dim=1) + return self.lovasz_softmax(pred, label, per_image=self.per_image, ignore=self.ignore_index, weighted=self.weighted) \ No newline at end of file diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/miou.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/miou.py new file mode 100644 index 0000000000000000000000000000000000000000..51a2cc965a5c0cfd5497c9191906898da31485dd --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/miou.py @@ -0,0 +1,155 @@ +import cv2 +import os +import numpy as np + +from collections import OrderedDict +from PIL import Image as PILImage +from utils.transforms import transform_parsing + +LABELS = ['Background', 'Hat', 'Hair', 'Glove', 'Sunglasses', 'Upper-clothes', 'Dress', 'Coat', \ + 'Socks', 'Pants', 'Jumpsuits', 'Scarf', 'Skirt', 'Face', 'Left-arm', 'Right-arm', 'Left-leg', + 'Right-leg', 'Left-shoe', 'Right-shoe'] + + +# LABELS = ['Background', 'Head', 'Torso', 'Upper Arms', 'Lower Arms', 'Upper Legs', 'Lower Legs'] + +def get_palette(num_cls): + """ Returns the color map for visualizing the segmentation mask. 
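+    Each class index is mapped to an RGB color by spreading the bits of the
+    index across the three channels, in the usual PASCAL VOC-style palette scheme.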
+ Args: + num_cls: Number of classes + Returns: + The color map + """ + + n = num_cls + palette = [0] * (n * 3) + for j in range(0, n): + lab = j + palette[j * 3 + 0] = 0 + palette[j * 3 + 1] = 0 + palette[j * 3 + 2] = 0 + i = 0 + while lab: + palette[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i)) + palette[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i)) + palette[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i)) + i += 1 + lab >>= 3 + return palette + + +def get_confusion_matrix(gt_label, pred_label, num_classes): + """ + Calcute the confusion matrix by given label and pred + :param gt_label: the ground truth label + :param pred_label: the pred label + :param num_classes: the nunber of class + :return: the confusion matrix + """ + index = (gt_label * num_classes + pred_label).astype('int32') + label_count = np.bincount(index) + confusion_matrix = np.zeros((num_classes, num_classes)) + + for i_label in range(num_classes): + for i_pred_label in range(num_classes): + cur_index = i_label * num_classes + i_pred_label + if cur_index < len(label_count): + confusion_matrix[i_label, i_pred_label] = label_count[cur_index] + + return confusion_matrix + + +def compute_mean_ioU(preds, scales, centers, num_classes, datadir, input_size=[473, 473], dataset='val'): + val_file = os.path.join(datadir, dataset + '_id.txt') + val_id = [i_id.strip() for i_id in open(val_file)] + + confusion_matrix = np.zeros((num_classes, num_classes)) + + for i, pred_out in enumerate(preds): + im_name = val_id[i] + gt_path = os.path.join(datadir, dataset + '_segmentations', im_name + '.png') + gt = np.array(PILImage.open(gt_path)) + h, w = gt.shape + s = scales[i] + c = centers[i] + pred = transform_parsing(pred_out, c, s, w, h, input_size) + + gt = np.asarray(gt, dtype=np.int32) + pred = np.asarray(pred, dtype=np.int32) + + ignore_index = gt != 255 + + gt = gt[ignore_index] + pred = pred[ignore_index] + + confusion_matrix += get_confusion_matrix(gt, pred, num_classes) + + pos = confusion_matrix.sum(1) + res = confusion_matrix.sum(0) + tp = np.diag(confusion_matrix) + + pixel_accuracy = (tp.sum() / pos.sum()) * 100 + mean_accuracy = ((tp / np.maximum(1.0, pos)).mean()) * 100 + IoU_array = (tp / np.maximum(1.0, pos + res - tp)) + IoU_array = IoU_array * 100 + mean_IoU = IoU_array.mean() + print('Pixel accuracy: %f \n' % pixel_accuracy) + print('Mean accuracy: %f \n' % mean_accuracy) + print('Mean IU: %f \n' % mean_IoU) + name_value = [] + + for i, (label, iou) in enumerate(zip(LABELS, IoU_array)): + name_value.append((label, iou)) + + name_value.append(('Pixel accuracy', pixel_accuracy)) + name_value.append(('Mean accuracy', mean_accuracy)) + name_value.append(('Mean IU', mean_IoU)) + name_value = OrderedDict(name_value) + return name_value + + +def compute_mean_ioU_file(preds_dir, num_classes, datadir, dataset='val'): + list_path = os.path.join(datadir, dataset + '_id.txt') + val_id = [i_id.strip() for i_id in open(list_path)] + + confusion_matrix = np.zeros((num_classes, num_classes)) + + for i, im_name in enumerate(val_id): + gt_path = os.path.join(datadir, 'segmentations', im_name + '.png') + gt = cv2.imread(gt_path, cv2.IMREAD_GRAYSCALE) + + pred_path = os.path.join(preds_dir, im_name + '.png') + pred = np.asarray(PILImage.open(pred_path)) + + gt = np.asarray(gt, dtype=np.int32) + pred = np.asarray(pred, dtype=np.int32) + + ignore_index = gt != 255 + + gt = gt[ignore_index] + pred = pred[ignore_index] + + confusion_matrix += get_confusion_matrix(gt, pred, num_classes) + + pos = confusion_matrix.sum(1) + res = confusion_matrix.sum(0) + 
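+    # pos: per-class ground-truth pixel counts (row sums); res: per-class predicted
+    # pixel counts (column sums); tp: true positives on the diagonal. Per-class
+    # IoU = tp / (pos + res - tp), with the denominator clamped to at least 1.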
tp = np.diag(confusion_matrix) + + pixel_accuracy = (tp.sum() / pos.sum()) * 100 + mean_accuracy = ((tp / np.maximum(1.0, pos)).mean()) * 100 + IoU_array = (tp / np.maximum(1.0, pos + res - tp)) + IoU_array = IoU_array * 100 + mean_IoU = IoU_array.mean() + print('Pixel accuracy: %f \n' % pixel_accuracy) + print('Mean accuracy: %f \n' % mean_accuracy) + print('Mean IU: %f \n' % mean_IoU) + name_value = [] + + for i, (label, iou) in enumerate(zip(LABELS, IoU_array)): + name_value.append((label, iou)) + + name_value.append(('Pixel accuracy', pixel_accuracy)) + name_value.append(('Mean accuracy', mean_accuracy)) + name_value.append(('Mean IU', mean_IoU)) + name_value = OrderedDict(name_value) + return name_value diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/schp.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/schp.py new file mode 100644 index 0000000000000000000000000000000000000000..f57470452fac8183dc5c17156439416c15bd3265 --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/schp.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : schp.py +@Time : 4/8/19 2:11 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import os +import torch +import modules + +def moving_average(net1, net2, alpha=1): + for param1, param2 in zip(net1.parameters(), net2.parameters()): + param1.data *= (1.0 - alpha) + param1.data += param2.data * alpha + + +def _check_bn(module, flag): + if issubclass(module.__class__, modules.bn.InPlaceABNSync): + flag[0] = True + + +def check_bn(model): + flag = [False] + model.apply(lambda module: _check_bn(module, flag)) + return flag[0] + + +def reset_bn(module): + if issubclass(module.__class__, modules.bn.InPlaceABNSync): + module.running_mean = torch.zeros_like(module.running_mean) + module.running_var = torch.ones_like(module.running_var) + + +def _get_momenta(module, momenta): + if issubclass(module.__class__, modules.bn.InPlaceABNSync): + momenta[module] = module.momentum + + +def _set_momenta(module, momenta): + if issubclass(module.__class__, modules.bn.InPlaceABNSync): + module.momentum = momenta[module] + + +def bn_re_estimate(loader, model): + if not check_bn(model): + print('No batch norm layer detected') + return + model.train() + momenta = {} + model.apply(reset_bn) + model.apply(lambda module: _get_momenta(module, momenta)) + n = 0 + for i_iter, batch in enumerate(loader): + images, labels, _ = batch + b = images.data.size(0) + momentum = b / (n + b) + for module in momenta.keys(): + module.momentum = momentum + model(images) + n += b + model.apply(lambda module: _set_momenta(module, momenta)) + + +def save_schp_checkpoint(states, is_best_parsing, output_dir, filename='schp_checkpoint.pth.tar'): + save_path = os.path.join(output_dir, filename) + if os.path.exists(save_path): + os.remove(save_path) + torch.save(states, save_path) + if is_best_parsing and 'state_dict' in states: + best_save_path = os.path.join(output_dir, 'model_parsing_best.pth.tar') + if os.path.exists(best_save_path): + os.remove(best_save_path) + torch.save(states, best_save_path) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/soft_dice_loss.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/soft_dice_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..cb5895fd37467d36f213f941d1b01d6d6f7f194c --- /dev/null +++ 
b/Self-Correction-Human-Parsing-for-ACGPN/utils/soft_dice_loss.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : soft_dice_loss.py +@Time : 8/13/19 5:09 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +from __future__ import print_function, division + +import torch +import torch.nn.functional as F +from torch import nn + +try: + from itertools import ifilterfalse +except ImportError: # py3k + from itertools import filterfalse as ifilterfalse + + +def tversky_loss(probas, labels, alpha=0.5, beta=0.5, epsilon=1e-6): + ''' + Tversky loss function. + probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1) + labels: [P] Tensor, ground truth labels (between 0 and C - 1) + + Same as soft dice loss when alpha=beta=0.5. + Same as Jaccord loss when alpha=beta=1.0. + See `Tversky loss function for image segmentation using 3D fully convolutional deep networks` + https://arxiv.org/pdf/1706.05721.pdf + ''' + C = probas.size(1) + losses = [] + for c in list(range(C)): + fg = (labels == c).float() + if fg.sum() == 0: + continue + class_pred = probas[:, c] + p0 = class_pred + p1 = 1 - class_pred + g0 = fg + g1 = 1 - fg + numerator = torch.sum(p0 * g0) + denominator = numerator + alpha * torch.sum(p0 * g1) + beta * torch.sum(p1 * g0) + losses.append(1 - ((numerator) / (denominator + epsilon))) + return mean(losses) + + +def flatten_probas(probas, labels, ignore=255): + """ + Flattens predictions in the batch + """ + B, C, H, W = probas.size() + probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C + labels = labels.view(-1) + if ignore is None: + return probas, labels + valid = (labels != ignore) + vprobas = probas[valid.nonzero().squeeze()] + vlabels = labels[valid] + return vprobas, vlabels + + +def isnan(x): + return x != x + + +def mean(l, ignore_nan=False, empty=0): + """ + nanmean compatible with generators. + """ + l = iter(l) + if ignore_nan: + l = ifilterfalse(isnan, l) + try: + n = 1 + acc = next(l) + except StopIteration: + if empty == 'raise': + raise ValueError('Empty mean') + return empty + for n, v in enumerate(l, 2): + acc += v + if n == 1: + return acc + return acc / n + + +class SoftDiceLoss(nn.Module): + def __init__(self, ignore_index=255): + super(SoftDiceLoss, self).__init__() + self.ignore_index = ignore_index + + def forward(self, pred, label): + pred = F.softmax(pred, dim=1) + return tversky_loss(*flatten_probas(pred, label, ignore=self.ignore_index), alpha=0.5, beta=0.5) + + +class SoftJaccordLoss(nn.Module): + def __init__(self, ignore_index=255): + super(SoftJaccordLoss, self).__init__() + self.ignore_index = ignore_index + + def forward(self, pred, label): + pred = F.softmax(pred, dim=1) + return tversky_loss(*flatten_probas(pred, label, ignore=self.ignore_index), alpha=1.0, beta=1.0) diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/transforms.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..1442a728938ca19fcb4ac21ae6588266df45631c --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/transforms.py @@ -0,0 +1,167 @@ +# ------------------------------------------------------------------------------ +# Copyright (c) Microsoft +# Licensed under the MIT License. 
+# Written by Bin Xiao (Bin.Xiao@microsoft.com) +# ------------------------------------------------------------------------------ + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import numpy as np +import cv2 +import torch + +class BRG2Tensor_transform(object): + def __call__(self, pic): + img = torch.from_numpy(pic.transpose((2, 0, 1))) + if isinstance(img, torch.ByteTensor): + return img.float() + else: + return img + +class BGR2RGB_transform(object): + def __call__(self, tensor): + return tensor[[2,1,0],:,:] + +def flip_back(output_flipped, matched_parts): + ''' + ouput_flipped: numpy.ndarray(batch_size, num_joints, height, width) + ''' + assert output_flipped.ndim == 4,\ + 'output_flipped should be [batch_size, num_joints, height, width]' + + output_flipped = output_flipped[:, :, :, ::-1] + + for pair in matched_parts: + tmp = output_flipped[:, pair[0], :, :].copy() + output_flipped[:, pair[0], :, :] = output_flipped[:, pair[1], :, :] + output_flipped[:, pair[1], :, :] = tmp + + return output_flipped + + +def fliplr_joints(joints, joints_vis, width, matched_parts): + """ + flip coords + """ + # Flip horizontal + joints[:, 0] = width - joints[:, 0] - 1 + + # Change left-right parts + for pair in matched_parts: + joints[pair[0], :], joints[pair[1], :] = \ + joints[pair[1], :], joints[pair[0], :].copy() + joints_vis[pair[0], :], joints_vis[pair[1], :] = \ + joints_vis[pair[1], :], joints_vis[pair[0], :].copy() + + return joints*joints_vis, joints_vis + + +def transform_preds(coords, center, scale, input_size): + target_coords = np.zeros(coords.shape) + trans = get_affine_transform(center, scale, 0, input_size, inv=1) + for p in range(coords.shape[0]): + target_coords[p, 0:2] = affine_transform(coords[p, 0:2], trans) + return target_coords + +def transform_parsing(pred, center, scale, width, height, input_size): + + trans = get_affine_transform(center, scale, 0, input_size, inv=1) + target_pred = cv2.warpAffine( + pred, + trans, + (int(width), int(height)), #(int(width), int(height)), + flags=cv2.INTER_NEAREST, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0)) + + return target_pred + +def transform_logits(logits, center, scale, width, height, input_size): + + trans = get_affine_transform(center, scale, 0, input_size, inv=1) + channel = logits.shape[2] + target_logits = [] + for i in range(channel): + target_logit = cv2.warpAffine( + logits[:,:,i], + trans, + (int(width), int(height)), #(int(width), int(height)), + flags=cv2.INTER_LINEAR, + borderMode=cv2.BORDER_CONSTANT, + borderValue=(0)) + target_logits.append(target_logit) + target_logits = np.stack(target_logits,axis=2) + + return target_logits + + +def get_affine_transform(center, + scale, + rot, + output_size, + shift=np.array([0, 0], dtype=np.float32), + inv=0): + if not isinstance(scale, np.ndarray) and not isinstance(scale, list): + print(scale) + scale = np.array([scale, scale]) + + scale_tmp = scale + + src_w = scale_tmp[0] + dst_w = output_size[1] + dst_h = output_size[0] + + rot_rad = np.pi * rot / 180 + src_dir = get_dir([0, src_w * -0.5], rot_rad) + dst_dir = np.array([0, (dst_w-1) * -0.5], np.float32) + + src = np.zeros((3, 2), dtype=np.float32) + dst = np.zeros((3, 2), dtype=np.float32) + src[0, :] = center + scale_tmp * shift + src[1, :] = center + src_dir + scale_tmp * shift + dst[0, :] = [(dst_w-1) * 0.5, (dst_h-1) * 0.5] + dst[1, :] = np.array([(dst_w-1) * 0.5, (dst_h-1) * 0.5]) + dst_dir + + src[2:, :] = get_3rd_point(src[0, :], src[1, :]) + 
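+    # get_3rd_point rotates the (first - second) vector by 90 degrees around the
+    # second point, so src and dst each contain three non-collinear points as
+    # required by cv2.getAffineTransform below.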
dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :]) + + if inv: + trans = cv2.getAffineTransform(np.float32(dst), np.float32(src)) + else: + trans = cv2.getAffineTransform(np.float32(src), np.float32(dst)) + + return trans + + +def affine_transform(pt, t): + new_pt = np.array([pt[0], pt[1], 1.]).T + new_pt = np.dot(t, new_pt) + return new_pt[:2] + + +def get_3rd_point(a, b): + direct = a - b + return b + np.array([-direct[1], direct[0]], dtype=np.float32) + + +def get_dir(src_point, rot_rad): + sn, cs = np.sin(rot_rad), np.cos(rot_rad) + + src_result = [0, 0] + src_result[0] = src_point[0] * cs - src_point[1] * sn + src_result[1] = src_point[0] * sn + src_point[1] * cs + + return src_result + + +def crop(img, center, scale, output_size, rot=0): + trans = get_affine_transform(center, scale, rot, output_size) + + dst_img = cv2.warpAffine(img, + trans, + (int(output_size[1]), int(output_size[0])), + flags=cv2.INTER_LINEAR) + + return dst_img diff --git a/Self-Correction-Human-Parsing-for-ACGPN/utils/warmup_scheduler.py b/Self-Correction-Human-Parsing-for-ACGPN/utils/warmup_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..2528a9c598d5ee3477d60e2f8591ec37e8afb41d --- /dev/null +++ b/Self-Correction-Human-Parsing-for-ACGPN/utils/warmup_scheduler.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +""" +@Author : Peike Li +@Contact : peike.li@yahoo.com +@File : warmup_scheduler.py +@Time : 3/28/19 2:24 PM +@Desc : +@License : This source code is licensed under the license found in the + LICENSE file in the root directory of this source tree. +""" + +import math +from torch.optim.lr_scheduler import _LRScheduler + + +class GradualWarmupScheduler(_LRScheduler): + """ Gradually warm-up learning rate with cosine annealing in optimizer. + Proposed in 'Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour'. + """ + + def __init__(self, optimizer, total_epoch, eta_min=0, warmup_epoch=10, last_epoch=-1): + self.total_epoch = total_epoch + self.eta_min = eta_min + self.warmup_epoch = warmup_epoch + super(GradualWarmupScheduler, self).__init__(optimizer, last_epoch) + + def get_lr(self): + if self.last_epoch <= self.warmup_epoch: + return [self.eta_min + self.last_epoch*(base_lr - self.eta_min)/self.warmup_epoch for base_lr in self.base_lrs] + else: + return [self.eta_min + (base_lr-self.eta_min)*(1+math.cos(math.pi*(self.last_epoch-self.warmup_epoch)/(self.total_epoch-self.warmup_epoch))) / 2 for base_lr in self.base_lrs] + + +class SGDRScheduler(_LRScheduler): + """ Consine annealing with warm up and restarts. + Proposed in `SGDR: Stochastic Gradient Descent with Warm Restarts`. 
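+        Schedule: linear warm-up for `warmup_epoch` epochs, cosine annealing from the
+        base learning rate until `start_cyclical`, then cosine restarts of period
+        `cyclical_epoch` starting from `cyclical_base_lr`, all annealing towards `eta_min`.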
+ """ + def __init__(self, optimizer, total_epoch=150, start_cyclical=100, cyclical_base_lr=7e-4, cyclical_epoch=10, eta_min=0, warmup_epoch=10, last_epoch=-1): + self.total_epoch = total_epoch + self.start_cyclical = start_cyclical + self.cyclical_epoch = cyclical_epoch + self.cyclical_base_lr = cyclical_base_lr + self.eta_min = eta_min + self.warmup_epoch = warmup_epoch + super(SGDRScheduler, self).__init__(optimizer, last_epoch) + + def get_lr(self): + if self.last_epoch < self.warmup_epoch: + return [self.eta_min + self.last_epoch*(base_lr - self.eta_min)/self.warmup_epoch for base_lr in self.base_lrs] + elif self.last_epoch < self.start_cyclical: + return [self.eta_min + (base_lr-self.eta_min)*(1+math.cos(math.pi*(self.last_epoch-self.warmup_epoch)/(self.start_cyclical-self.warmup_epoch))) / 2 for base_lr in self.base_lrs] + else: + return [self.eta_min + (self.cyclical_base_lr-self.eta_min)*(1+math.cos(math.pi* ((self.last_epoch-self.start_cyclical)% self.cyclical_epoch)/self.cyclical_epoch)) / 2 for base_lr in self.base_lrs] + + +if __name__ == '__main__': + import matplotlib.pyplot as plt + import torch + model = torch.nn.Linear(10, 2) + optimizer = torch.optim.SGD(params=model.parameters(), lr=7e-3, momentum=0.9, weight_decay=5e-4) + scheduler_warmup = SGDRScheduler(optimizer, total_epoch=150, eta_min=7e-5, warmup_epoch=10, start_cyclical=100, cyclical_base_lr=3.5e-3, cyclical_epoch=10) + lr = [] + for epoch in range(0,150): + scheduler_warmup.step(epoch) + lr.append(scheduler_warmup.get_lr()) + plt.style.use('ggplot') + plt.plot(list(range(0,150)), lr) + plt.show() + diff --git a/app.py b/app.py new file mode 100644 index 0000000000000000000000000000000000000000..0155766ba590ef6d3596602626b689f0ede17eea --- /dev/null +++ b/app.py @@ -0,0 +1,129 @@ +import numpy as np +import os +import time +import sys +import gradio as gr +import u2net_load +import u2net_run +from rembg import remove +from PIL import Image, ImageOps +from predict_pose import generate_pose_keypoints + +# Make directories +os.system("mkdir ./Data_preprocessing") +os.system("mkdir ./Data_preprocessing/test_color") +os.system("mkdir ./Data_preprocessing/test_colormask") +os.system("mkdir ./Data_preprocessing/test_edge") +os.system("mkdir ./Data_preprocessing/test_img") +os.system("mkdir ./Data_preprocessing/test_label") +os.system("mkdir ./Data_preprocessing/test_mask") +os.system("mkdir ./Data_preprocessing/test_pose") +os.system("mkdir ./inputs") +os.system("mkdir ./inputs/img") +os.system("mkdir ./inputs/cloth") +os.system("mkdir ./saved_models/u2net") +os.system("mkdir ./saved_models/u2netp") +os.system("mkdir ./pose") +os.system("mkdir ./checkpoints") + +# Get pose model +os.system("wget -O ./pose/pose_deploy_linevec.prototxt https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/pose_deploy_linevec.prototxt") +os.system("wget -O ./pose/pose_iter_440000.caffemodel https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/pose_iter_440000.caffemodel") + +# For segmentation mask generation +os.system("wget https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/lip_final.pth") + +# Get U-2-Net weights +os.system("wget -P saved_models/u2netp/ https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/u2netp.pth") +os.system("wget -P saved_models/u2net/ https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/u2net.pth") + +# Get model checkpoints +os.system("wget -O ./checkpoints/decavtonfifapretrain.zip 
https://github.com/hasibzunair/vton-demo/releases/download/v1.0/decavtonfifapretrain.zip") +os.system("unzip ./checkpoints/decavtonfifapretrain.zip -d ./checkpoints/") + +print("########################Setup done!########################") + +# Load U-2-Net model +u2net = u2net_load.model(model_name = 'u2netp') +# Main inference function +def inference(clothing_image, person_image, remove_bg): + """ + Do try-on! + """ + # Read cloth and person images + cloth = Image.open(clothing_image) # cloth + person = Image.open(person_image) # person + # Save cloth and person images in "input" folder + cloth.save(os.path.join("inputs/cloth/cloth.png")) + person.save(os.path.join("inputs/img/person.png")) + + ############## Clothing image pre-processing + cloth_name = 'cloth.png' + cloth_path = os.path.join('inputs/cloth', sorted(os.listdir('inputs/cloth'))[0]) + cloth = Image.open(cloth_path) + # Resize cloth image + cloth = ImageOps.fit(cloth, (192, 256), Image.BICUBIC).convert("RGB") + # Save resized cloth image + cloth.save(os.path.join('Data_preprocessing/test_color', cloth_name)) + # 1. Get binary mask for clothing image + u2net_run.infer(u2net, 'Data_preprocessing/test_color', 'Data_preprocessing/test_edge') + + ############## Person image pre-processing + start_time = time.time() + # Person image + img_name = 'person.png' + img_path = os.path.join('inputs/img', sorted(os.listdir('inputs/img'))[0]) + img = Image.open(img_path) + if remove_bg == "yes": + # Remove background + img = remove(img, alpha_matting=True, alpha_matting_erode_size=15) + print("Removing background from person image..") + img = ImageOps.fit(img, (192, 256), Image.BICUBIC).convert("RGB") + img_path = os.path.join('Data_preprocessing/test_img', img_name) + img.save(img_path) + resize_time = time.time() + print('Resized image in {}s'.format(resize_time-start_time)) + + # 2. Get parsed person image (test_label), uses person image + os.system("python3 Self-Correction-Human-Parsing-for-ACGPN/simple_extractor.py --dataset 'lip' --model-restore 'lip_final.pth' --input-dir 'Data_preprocessing/test_img' --output-dir 'Data_preprocessing/test_label'") + parse_time = time.time() + print('Parsing generated in {}s'.format(parse_time-resize_time)) + + # 3. Get pose map from person image + pose_path = os.path.join('Data_preprocessing/test_pose', img_name.replace('.png', '_keypoints.json')) + generate_pose_keypoints(img_path, pose_path) + pose_time = time.time() + print('Pose map generated in {}s'.format(pose_time-parse_time)) + os.system("rm -rf Data_preprocessing/test_pairs.txt") + + # Format: person, cloth image + with open('Data_preprocessing/test_pairs.txt','w') as f: + f.write('person.png cloth.png') + + # Do try-on + os.system("python test.py --name decavtonfifapretrain") + tryon_image = Image.open("results/test/try-on/person.png") + print("Size of image is: ", tryon_image.size) + return os.path.join("results/test/try-on/person.png") + + +title = "Virtual Dressing Room" +description = "This is a demo for image based virtual try-on. It generates a synthetic image of a person wearing a target clothing item. To use it, simply upload your clothing item and person images, or click one of the examples to load them. Read more at the links below." +article = "

Fill in Fabrics: Body-Aware Self-Supervised Inpainting for Image-Based Virtual Try-On (Under Review!) | Github

" +thumbnail = None # "./pathtothumbnail.png" + +gr.Interface( + inference, + [gr.inputs.Image(type='filepath', label="Clothing Image"), + gr.inputs.Image(type='filepath', label="Person Image"), + gr.inputs.Radio(choices=["yes","no"], default="no", label="Remove background from person image?")], + gr.outputs.Image(type="filepath", label="Predicted Output"), + examples=[["./examples/1/cloth.jpg", "./examples/1/person.jpg"], + ["./examples/2/cloth.jpg", "./examples/2/person.jpg"]], + title=title, + description=description, + article=article, + allow_flagging=False, + analytics_enabled=False, + thumbnail=thumbnail, + ).launch(debug=True, enable_queue=True) \ No newline at end of file diff --git a/data/aligned_dataset.py b/data/aligned_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..e187b4d64fcedadf2fa3feca76e1d005a16f67d4 --- /dev/null +++ b/data/aligned_dataset.py @@ -0,0 +1,213 @@ +import os.path +from data.base_dataset import BaseDataset, get_params, get_transform, normalize +from data.image_folder import make_dataset, make_dataset_test +from PIL import Image +import torch +import json +import numpy as np +import os.path as osp +from PIL import ImageDraw + + +class AlignedDataset(BaseDataset): + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + self.diction = {} + + self.fine_height = 256 + self.fine_width = 192 + self.radius = 5 + + # load data list from pairs file + human_names = [] + cloth_names = [] + with open(os.path.join(opt.dataroot, opt.datapairs), 'r') as f: + for line in f.readlines(): + h_name, c_name = line.strip().split() + human_names.append(h_name) + cloth_names.append(c_name) + self.human_names = human_names + self.cloth_names = cloth_names + self.dataset_size = len(human_names) + + # input A (label maps) + dir_A = '_A' if self.opt.label_nc == 0 else '_label' + self.dir_A = os.path.join(opt.dataroot, opt.phase + dir_A) + self.A_paths = sorted(make_dataset(self.dir_A)) + + self.fine_height = 256 + self.fine_width = 192 + self.radius = 5 + + # input A test (label maps) + dir_A = '_A' if self.opt.label_nc == 0 else '_label' + self.dir_A = os.path.join(opt.dataroot, opt.phase + dir_A) + self.A_paths = sorted(make_dataset_test(self.dir_A)) + + # input B (real images) + dir_B = '_B' if self.opt.label_nc == 0 else '_img' + self.dir_B = os.path.join(opt.dataroot, opt.phase + dir_B) + self.B_paths = sorted(make_dataset(self.dir_B)) + + self.dataset_size = len(self.A_paths) + self.build_index(self.B_paths) + + dir_E = '_edge' + self.dir_E = os.path.join(opt.dataroot, opt.phase + dir_E) + self.E_paths = sorted(make_dataset(self.dir_E)) + self.ER_paths = make_dataset(self.dir_E) + + dir_M = '_mask' + self.dir_M = os.path.join(opt.dataroot, opt.phase + dir_M) + self.M_paths = sorted(make_dataset(self.dir_M)) + self.MR_paths = make_dataset(self.dir_M) + + dir_MC = '_colormask' + self.dir_MC = os.path.join(opt.dataroot, opt.phase + dir_MC) + self.MC_paths = sorted(make_dataset(self.dir_MC)) + self.MCR_paths = make_dataset(self.dir_MC) + + dir_C = '_color' + self.dir_C = os.path.join(opt.dataroot, opt.phase + dir_C) + self.C_paths = sorted(make_dataset(self.dir_C)) + self.CR_paths = make_dataset(self.dir_C) + # self.build_index(self.C_paths) + + dir_A = '_A' if self.opt.label_nc == 0 else '_label' + self.dir_A = os.path.join(opt.dataroot, opt.phase + dir_A) + self.A_paths = sorted(make_dataset_test(self.dir_A)) + + def random_sample(self, item): + name = item.split('/')[-1] + name = name.split('-')[0] + lst = self.diction[name] + 
new_lst = [] + for dir in lst: + if dir != item: + new_lst.append(dir) + return new_lst[np.random.randint(len(new_lst))] + + def build_index(self, dirs): + for k, dir in enumerate(dirs): + name = dir.split('/')[-1] + name = name.split('-')[0] + + # print(name) + for k, d in enumerate(dirs[max(k-20, 0):k+20]): + if name in d: + if name not in self.diction.keys(): + self.diction[name] = [] + self.diction[name].append(d) + else: + self.diction[name].append(d) + + def __getitem__(self, index): + train_mask = 9600 + # input A (label maps) + box = [] + # for k,x in enumerate(self.A_paths): + # if '000386' in x : + # index=k + # break + test = np.random.randint(2032) + # for k, s in enumerate(self.B_paths): + # if '006581' in s: + # test = k + # break + + # get names from the pairs file + c_name = self.cloth_names[index] + h_name = self.human_names[index] + + # A_path = self.A_paths[index] + A_path = osp.join(self.dir_A, h_name.replace(".jpg", ".png")) + A = Image.open(A_path).convert('L') + + params = get_params(self.opt, A.size) + if self.opt.label_nc == 0: + transform_A = get_transform(self.opt, params) + A_tensor = transform_A(A.convert('RGB')) + else: + transform_A = get_transform( + self.opt, params, method=Image.NEAREST, normalize=False) + A_tensor = transform_A(A) * 255.0 + + B_tensor = inst_tensor = feat_tensor = 0 + # input B (real images) + + # B_path = self.B_paths[index] + B_path = osp.join(self.dir_B, h_name) + name = B_path.split('/')[-1] + + B = Image.open(B_path).convert('RGB') + transform_B = get_transform(self.opt, params) + B_tensor = transform_B(B) + + # input M (masks) + M_path = B_path # self.M_paths[np.random.randint(1)] + MR_path = B_path # self.MR_paths[np.random.randint(1)] + M = Image.open(M_path).convert('L') + MR = Image.open(MR_path).convert('L') + M_tensor = transform_A(MR) + + ### input_MC (colorMasks) + MC_path = B_path # self.MC_paths[1] + MCR_path = B_path # self.MCR_paths[1] + MCR = Image.open(MCR_path).convert('L') + MC_tensor = transform_A(MCR) + + ### input_C (color) + # print(self.C_paths) + # C_path = self.C_paths[test] + C_path = osp.join(self.dir_C, c_name) + C = Image.open(C_path).convert('RGB') + C_tensor = transform_B(C) + + # Edge + # E_path = self.E_paths[test] + E_path = osp.join(self.dir_E, c_name) + # print(E_path) + E = Image.open(E_path).convert('L') + E_tensor = transform_A(E) + + # Pose + pose_name = B_path.replace('.jpg', '_keypoints.json').replace('.png', '_keypoints.json').replace( + 'test_img', 'test_pose') + with open(osp.join(pose_name), 'r') as f: + pose_label = json.load(f) + pose_data = pose_label['people'][0]['pose_keypoints'] + pose_data = np.array(pose_data) + pose_data = pose_data.reshape((-1, 3)) + + point_num = pose_data.shape[0] + pose_map = torch.zeros(point_num, self.fine_height, self.fine_width) + r = self.radius + im_pose = Image.new('L', (self.fine_width, self.fine_height)) + pose_draw = ImageDraw.Draw(im_pose) + for i in range(point_num): + one_map = Image.new('L', (self.fine_width, self.fine_height)) + draw = ImageDraw.Draw(one_map) + pointx = pose_data[i, 0] + pointy = pose_data[i, 1] + if pointx > 1 and pointy > 1: + draw.rectangle((pointx-r, pointy-r, pointx + + r, pointy+r), 'white', 'white') + pose_draw.rectangle( + (pointx-r, pointy-r, pointx+r, pointy+r), 'white', 'white') + one_map = transform_B(one_map.convert('RGB')) + pose_map[i] = one_map[0] + P_tensor = pose_map + + input_dict = {'label': A_tensor, 'image': B_tensor, + 'path': A_path, 'name': A_path.split("/")[-1], + 'edge': E_tensor, 'color': C_tensor, 
'mask': M_tensor, 'colormask': MC_tensor, 'pose': P_tensor + } + + return input_dict + + def __len__(self): + return len(self.A_paths) // self.opt.batchSize * self.opt.batchSize + + def name(self): + return 'AlignedDataset' diff --git a/data/base_data_loader.py b/data/base_data_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..778c7d809d716ce6e511e647fe32dbc7cb6c5cac --- /dev/null +++ b/data/base_data_loader.py @@ -0,0 +1,11 @@ + +class BaseDataLoader(): + def __init__(self): + pass + + def initialize(self, opt): + self.opt = opt + pass + + def load_data(): + return None diff --git a/data/base_dataset.py b/data/base_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..00a6a9e6e66cecdd852cf191812451d97042adb7 --- /dev/null +++ b/data/base_dataset.py @@ -0,0 +1,104 @@ +import torch.utils.data as data +from PIL import Image +import torchvision.transforms as transforms +import numpy as np +import random + + +class BaseDataset(data.Dataset): + def __init__(self): + super(BaseDataset, self).__init__() + + def name(self): + return 'BaseDataset' + + def initialize(self, opt): + pass + + +def get_params(opt, size): + w, h = size + new_h = h + new_w = w + if opt.resize_or_crop == 'resize_and_crop': + new_h = new_w = opt.loadSize + elif opt.resize_or_crop == 'scale_width_and_crop': + new_w = opt.loadSize + new_h = opt.loadSize * h // w + + x = random.randint(0, np.maximum(0, new_w - opt.fineSize)) + y = random.randint(0, np.maximum(0, new_h - opt.fineSize)) + + #flip = random.random() > 0.5 + flip = 0 + return {'crop_pos': (x, y), 'flip': flip} + + +def get_transform(opt, params, method=Image.BICUBIC, normalize=True): + transform_list = [] + if 'resize' in opt.resize_or_crop: + osize = [opt.loadSize, opt.loadSize] + transform_list.append(transforms.Resize(osize, method)) + elif 'scale_width' in opt.resize_or_crop: + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.loadSize, method))) + osize = [256, 192] + transform_list.append(transforms.Resize(osize, method)) + if 'crop' in opt.resize_or_crop: + transform_list.append(transforms.Lambda( + lambda img: __crop(img, params['crop_pos'], opt.fineSize))) + + if opt.resize_or_crop == 'none': + base = float(2 ** opt.n_downsample_global) + if opt.netG == 'local': + base *= (2 ** opt.n_local_enhancers) + transform_list.append(transforms.Lambda( + lambda img: __make_power_2(img, base, method))) + + if opt.isTrain and not opt.no_flip: + transform_list.append(transforms.Lambda( + lambda img: __flip(img, params['flip']))) + + transform_list += [transforms.ToTensor()] + + if normalize: + transform_list += [transforms.Normalize((0.5, 0.5, 0.5), + (0.5, 0.5, 0.5))] + return transforms.Compose(transform_list) + + +def normalize(): + return transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)) + + +def __make_power_2(img, base, method=Image.BICUBIC): + ow, oh = img.size + h = int(round(oh / base) * base) + w = int(round(ow / base) * base) + if (h == oh) and (w == ow): + return img + return img.resize((w, h), method) + + +def __scale_width(img, target_width, method=Image.BICUBIC): + ow, oh = img.size + if (ow == target_width): + return img + w = target_width + h = int(target_width * oh / ow) + return img.resize((w, h), method) + + +def __crop(img, pos, size): + ow, oh = img.size + x1, y1 = pos + tw = th = size + if (ow > tw or oh > th): + return img.crop((x1, y1, x1 + tw, y1 + th)) + return img + + +def __flip(img, flip): + if flip: + return img.transpose(Image.FLIP_LEFT_RIGHT) + 
return img diff --git a/data/custom_dataset_data_loader.py b/data/custom_dataset_data_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..6fa2bf3745f81afef1061dce5b141bbba22bcad4 --- /dev/null +++ b/data/custom_dataset_data_loader.py @@ -0,0 +1,32 @@ +import torch.utils.data +from data.base_data_loader import BaseDataLoader + + +def CreateDataset(opt): + dataset = None + from data.aligned_dataset import AlignedDataset + dataset = AlignedDataset() + + print("dataset [%s] was created" % (dataset.name())) + dataset.initialize(opt) + return dataset + + +class CustomDatasetDataLoader(BaseDataLoader): + def name(self): + return 'CustomDatasetDataLoader' + + def initialize(self, opt): + BaseDataLoader.initialize(self, opt) + self.dataset = CreateDataset(opt) + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batchSize, + shuffle=not opt.serial_batches, + num_workers=int(opt.nThreads)) + + def load_data(self): + return self.dataloader + + def __len__(self): + return min(len(self.dataset), self.opt.max_dataset_size) diff --git a/data/data_loader.py b/data/data_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..2a4433a29eb1e0a2be75c477f292c9e784ce5b6b --- /dev/null +++ b/data/data_loader.py @@ -0,0 +1,7 @@ + +def CreateDataLoader(opt): + from data.custom_dataset_data_loader import CustomDatasetDataLoader + data_loader = CustomDatasetDataLoader() + print(data_loader.name()) + data_loader.initialize(opt) + return data_loader diff --git a/data/image_folder.py b/data/image_folder.py new file mode 100644 index 0000000000000000000000000000000000000000..90d58ea2abb19d564cfae0e0685553227bdcab28 --- /dev/null +++ b/data/image_folder.py @@ -0,0 +1,76 @@ +############################################################################### +import torch.utils.data as data +from PIL import Image +import os + +IMG_EXTENSIONS = [ + '.jpg', '.JPG', '.jpeg', '.JPEG', + '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP', '.tiff' +] + + +def is_image_file(filename): + return any(filename.endswith(extension) for extension in IMG_EXTENSIONS) + + +def make_dataset(dir): + images = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + + f = dir.split('/')[-1].split('_')[-1] + print(dir, f) + dirs = os.listdir(dir) + for img in dirs: + + path = os.path.join(dir, img) + # print(path) + images.append(path) + return images + + +def make_dataset_test(dir): + images = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + + f = dir.split('/')[-1].split('_')[-1] + names = os.listdir(dir) + for i in range(len([name for name in os.listdir(dir) if os.path.isfile(os.path.join(dir, name))])): + img = names[i] + path = os.path.join(dir, img) + # print(path) + images.append(path) + return images + + +def default_loader(path): + return Image.open(path).convert('RGB') + + +class ImageFolder(data.Dataset): + + def __init__(self, root, transform=None, return_paths=False, + loader=default_loader): + imgs = make_dataset(root) + if len(imgs) == 0: + raise(RuntimeError("Found 0 images in: " + root + "\n" + "Supported image extensions are: " + + ",".join(IMG_EXTENSIONS))) + + self.root = root + self.imgs = imgs + self.transform = transform + self.return_paths = return_paths + self.loader = loader + + def __getitem__(self, index): + path = self.imgs[index] + img = self.loader(path) + if self.transform is not None: + img = self.transform(img) + if self.return_paths: + return img, path + else: + return img + + def __len__(self): + return 
len(self.imgs) diff --git a/data_loader.py b/data_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..3d4bfa19a10c2f8f68f528bb88e2f162ed641321 --- /dev/null +++ b/data_loader.py @@ -0,0 +1,268 @@ +# data loader +from __future__ import print_function, division +import glob +import torch +from skimage import io, transform, color +import numpy as np +import random +import math +import matplotlib.pyplot as plt +from torch.utils.data import Dataset, DataLoader +from torchvision import transforms, utils +from PIL import Image + +#==========================dataset load========================== +class RescaleT(object): + + def __init__(self,output_size): + assert isinstance(output_size,(int,tuple)) + self.output_size = output_size + + def __call__(self,sample): + imidx, image, label = sample['imidx'], sample['image'],sample['label'] + + h, w = image.shape[:2] + + if isinstance(self.output_size,int): + if h > w: + new_h, new_w = self.output_size*h/w,self.output_size + else: + new_h, new_w = self.output_size,self.output_size*w/h + else: + new_h, new_w = self.output_size + + new_h, new_w = int(new_h), int(new_w) + + # #resize the image to new_h x new_w and convert image from range [0,255] to [0,1] + # img = transform.resize(image,(new_h,new_w),mode='constant') + # lbl = transform.resize(label,(new_h,new_w),mode='constant', order=0, preserve_range=True) + + img = transform.resize(image,(self.output_size,self.output_size),mode='constant') + lbl = transform.resize(label,(self.output_size,self.output_size),mode='constant', order=0, preserve_range=True) + + return {'imidx':imidx, 'image':img,'label':lbl} + +class Rescale(object): + + def __init__(self,output_size): + assert isinstance(output_size,(int,tuple)) + self.output_size = output_size + + def __call__(self,sample): + imidx, image, label = sample['imidx'], sample['image'],sample['label'] + + if random.random() >= 0.5: + image = image[::-1] + label = label[::-1] + + h, w = image.shape[:2] + + if isinstance(self.output_size,int): + if h > w: + new_h, new_w = self.output_size*h/w,self.output_size + else: + new_h, new_w = self.output_size,self.output_size*w/h + else: + new_h, new_w = self.output_size + + new_h, new_w = int(new_h), int(new_w) + + # #resize the image to new_h x new_w and convert image from range [0,255] to [0,1] + img = transform.resize(image,(new_h,new_w),mode='constant') + lbl = transform.resize(label,(new_h,new_w),mode='constant', order=0, preserve_range=True) + + return {'imidx':imidx, 'image':img,'label':lbl} + +class RandomCrop(object): + + def __init__(self,output_size): + assert isinstance(output_size, (int, tuple)) + if isinstance(output_size, int): + self.output_size = (output_size, output_size) + else: + assert len(output_size) == 2 + self.output_size = output_size + def __call__(self,sample): + imidx, image, label = sample['imidx'], sample['image'], sample['label'] + + if random.random() >= 0.5: + image = image[::-1] + label = label[::-1] + + h, w = image.shape[:2] + new_h, new_w = self.output_size + + top = np.random.randint(0, h - new_h) + left = np.random.randint(0, w - new_w) + + image = image[top: top + new_h, left: left + new_w] + label = label[top: top + new_h, left: left + new_w] + + return {'imidx':imidx,'image':image, 'label':label} + +class ToTensor(object): + """Convert ndarrays in sample to Tensors.""" + + def __call__(self, sample): + + imidx, image, label = sample['imidx'], sample['image'], sample['label'] + + tmpImg = np.zeros((image.shape[0],image.shape[1],3)) + tmpLbl = 
np.zeros(label.shape) + + image = image/np.max(image) + if(np.max(label)<1e-6): + label = label + else: + label = label/np.max(label) + + if image.shape[2]==1: + tmpImg[:,:,0] = (image[:,:,0]-0.485)/0.229 + tmpImg[:,:,1] = (image[:,:,0]-0.485)/0.229 + tmpImg[:,:,2] = (image[:,:,0]-0.485)/0.229 + else: + tmpImg[:,:,0] = (image[:,:,0]-0.485)/0.229 + tmpImg[:,:,1] = (image[:,:,1]-0.456)/0.224 + tmpImg[:,:,2] = (image[:,:,2]-0.406)/0.225 + + tmpLbl[:,:,0] = label[:,:,0] + + # change the r,g,b to b,r,g from [0,255] to [0,1] + #transforms.Normalize(mean = (0.485, 0.456, 0.406), std = (0.229, 0.224, 0.225)) + tmpImg = tmpImg.transpose((2, 0, 1)) + tmpLbl = label.transpose((2, 0, 1)) + + return {'imidx':torch.from_numpy(imidx), 'image': torch.from_numpy(tmpImg), 'label': torch.from_numpy(tmpLbl)} + +class ToTensorLab(object): + """Convert ndarrays in sample to Tensors.""" + def __init__(self,flag=0): + self.flag = flag + + def __call__(self, sample): + + imidx, image, label =sample['imidx'], sample['image'], sample['label'] + + tmpLbl = np.zeros(label.shape) + + if(np.max(label)<1e-6): + label = label + else: + label = label/np.max(label) + + # change the color space + if self.flag == 2: # with rgb and Lab colors + tmpImg = np.zeros((image.shape[0],image.shape[1],6)) + tmpImgt = np.zeros((image.shape[0],image.shape[1],3)) + if image.shape[2]==1: + tmpImgt[:,:,0] = image[:,:,0] + tmpImgt[:,:,1] = image[:,:,0] + tmpImgt[:,:,2] = image[:,:,0] + else: + tmpImgt = image + tmpImgtl = color.rgb2lab(tmpImgt) + + # nomalize image to range [0,1] + tmpImg[:,:,0] = (tmpImgt[:,:,0]-np.min(tmpImgt[:,:,0]))/(np.max(tmpImgt[:,:,0])-np.min(tmpImgt[:,:,0])) + tmpImg[:,:,1] = (tmpImgt[:,:,1]-np.min(tmpImgt[:,:,1]))/(np.max(tmpImgt[:,:,1])-np.min(tmpImgt[:,:,1])) + tmpImg[:,:,2] = (tmpImgt[:,:,2]-np.min(tmpImgt[:,:,2]))/(np.max(tmpImgt[:,:,2])-np.min(tmpImgt[:,:,2])) + tmpImg[:,:,3] = (tmpImgtl[:,:,0]-np.min(tmpImgtl[:,:,0]))/(np.max(tmpImgtl[:,:,0])-np.min(tmpImgtl[:,:,0])) + tmpImg[:,:,4] = (tmpImgtl[:,:,1]-np.min(tmpImgtl[:,:,1]))/(np.max(tmpImgtl[:,:,1])-np.min(tmpImgtl[:,:,1])) + tmpImg[:,:,5] = (tmpImgtl[:,:,2]-np.min(tmpImgtl[:,:,2]))/(np.max(tmpImgtl[:,:,2])-np.min(tmpImgtl[:,:,2])) + + # tmpImg = tmpImg/(np.max(tmpImg)-np.min(tmpImg)) + + tmpImg[:,:,0] = (tmpImg[:,:,0]-np.mean(tmpImg[:,:,0]))/np.std(tmpImg[:,:,0]) + tmpImg[:,:,1] = (tmpImg[:,:,1]-np.mean(tmpImg[:,:,1]))/np.std(tmpImg[:,:,1]) + tmpImg[:,:,2] = (tmpImg[:,:,2]-np.mean(tmpImg[:,:,2]))/np.std(tmpImg[:,:,2]) + tmpImg[:,:,3] = (tmpImg[:,:,3]-np.mean(tmpImg[:,:,3]))/np.std(tmpImg[:,:,3]) + tmpImg[:,:,4] = (tmpImg[:,:,4]-np.mean(tmpImg[:,:,4]))/np.std(tmpImg[:,:,4]) + tmpImg[:,:,5] = (tmpImg[:,:,5]-np.mean(tmpImg[:,:,5]))/np.std(tmpImg[:,:,5]) + + elif self.flag == 1: #with Lab color + tmpImg = np.zeros((image.shape[0],image.shape[1],3)) + + if image.shape[2]==1: + tmpImg[:,:,0] = image[:,:,0] + tmpImg[:,:,1] = image[:,:,0] + tmpImg[:,:,2] = image[:,:,0] + else: + tmpImg = image + + tmpImg = color.rgb2lab(tmpImg) + + # tmpImg = tmpImg/(np.max(tmpImg)-np.min(tmpImg)) + + tmpImg[:,:,0] = (tmpImg[:,:,0]-np.min(tmpImg[:,:,0]))/(np.max(tmpImg[:,:,0])-np.min(tmpImg[:,:,0])) + tmpImg[:,:,1] = (tmpImg[:,:,1]-np.min(tmpImg[:,:,1]))/(np.max(tmpImg[:,:,1])-np.min(tmpImg[:,:,1])) + tmpImg[:,:,2] = (tmpImg[:,:,2]-np.min(tmpImg[:,:,2]))/(np.max(tmpImg[:,:,2])-np.min(tmpImg[:,:,2])) + + tmpImg[:,:,0] = (tmpImg[:,:,0]-np.mean(tmpImg[:,:,0]))/np.std(tmpImg[:,:,0]) + tmpImg[:,:,1] = (tmpImg[:,:,1]-np.mean(tmpImg[:,:,1]))/np.std(tmpImg[:,:,1]) + tmpImg[:,:,2] = 
(tmpImg[:,:,2]-np.mean(tmpImg[:,:,2]))/np.std(tmpImg[:,:,2]) + + else: # with rgb color + tmpImg = np.zeros((image.shape[0],image.shape[1],3)) + image = image/np.max(image) + if image.shape[2]==1: + tmpImg[:,:,0] = (image[:,:,0]-0.485)/0.229 + tmpImg[:,:,1] = (image[:,:,0]-0.485)/0.229 + tmpImg[:,:,2] = (image[:,:,0]-0.485)/0.229 + else: + tmpImg[:,:,0] = (image[:,:,0]-0.485)/0.229 + tmpImg[:,:,1] = (image[:,:,1]-0.456)/0.224 + tmpImg[:,:,2] = (image[:,:,2]-0.406)/0.225 + + tmpLbl[:,:,0] = label[:,:,0] + + # change the r,g,b to b,r,g from [0,255] to [0,1] + #transforms.Normalize(mean = (0.485, 0.456, 0.406), std = (0.229, 0.224, 0.225)) + tmpImg = tmpImg.transpose((2, 0, 1)) + tmpLbl = label.transpose((2, 0, 1)) + + return {'imidx':torch.from_numpy(imidx), 'image': torch.from_numpy(tmpImg), 'label': torch.from_numpy(tmpLbl)} + +class SalObjDataset(Dataset): + def __init__(self,img_name_list,lbl_name_list,transform=None): + # self.root_dir = root_dir + # self.image_name_list = glob.glob(image_dir+'*.png') + # self.label_name_list = glob.glob(label_dir+'*.png') + self.image_name_list = img_name_list + self.label_name_list = lbl_name_list + self.transform = transform + + def __len__(self): + return len(self.image_name_list) + + def __getitem__(self,idx): + + # image = Image.open(self.image_name_list[idx])#io.imread(self.image_name_list[idx]) + # label = Image.open(self.label_name_list[idx])#io.imread(self.label_name_list[idx]) + + image = io.imread(self.image_name_list[idx]) + imname = self.image_name_list[idx] + imidx = np.array([idx]) + + if(0==len(self.label_name_list)): + label_3 = np.zeros(image.shape) + else: + label_3 = io.imread(self.label_name_list[idx]) + + label = np.zeros(label_3.shape[0:2]) + if(3==len(label_3.shape)): + label = label_3[:,:,0] + elif(2==len(label_3.shape)): + label = label_3 + + if(3==len(image.shape) and 2==len(label.shape)): + label = label[:,:,np.newaxis] + elif(2==len(image.shape) and 2==len(label.shape)): + image = image[:,:,np.newaxis] + label = label[:,:,np.newaxis] + + sample = {'imidx':imidx, 'image':image, 'label':label} + + if self.transform: + sample = self.transform(sample) + + return sample diff --git a/demo.ipynb b/demo.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..a84967cc785a327ac60ee36c29ea44835757fd35 --- /dev/null +++ b/demo.ipynb @@ -0,0 +1,420 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "3CCSQIBMAYYZ" + }, + "source": [ + "# FIFA for Virtual Try-On Demo\n", + "\n", + "This notebook generates a synthetic image of a person wearing a target clothing. It requires an image of a person and a target clothing as inputs.\n", + "\n", + "Note: For this colab demo, repo is forked from https://github.com/hasibzunair/vton-demo since dkunited is private." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7WJtP2PfBcPN" + }, + "source": [ + "### Setup dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "GVm5QFBMDBbT" + }, + "outputs": [], + "source": [ + "!git clone https://github.com/hasibzunair/fifa-demo.git\n", + "%cd fifa-demo" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Cnk7syY0rPKp" + }, + "outputs": [], + "source": [ + "!pip install ninja" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "xQwI--uhoH6R" + }, + "outputs": [], + "source": [ + "import gdown\n", + "import numpy as np\n", + "from PIL import Image, ImageOps\n", + "import IPython\n", + "import gdown\n", + "import os\n", + "import sys\n", + "\n", + "from predict_pose import generate_pose_keypoints" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "86Ll7LIW1kGw" + }, + "outputs": [], + "source": [ + "# Create dummy folders same as train/test data dir tree\n", + "\n", + "!mkdir Data_preprocessing/test_color\n", + "!mkdir Data_preprocessing/test_colormask\n", + "!mkdir Data_preprocessing/test_edge\n", + "!mkdir Data_preprocessing/test_img\n", + "!mkdir Data_preprocessing/test_label\n", + "!mkdir Data_preprocessing/test_mask\n", + "!mkdir Data_preprocessing/test_pose\n", + "!mkdir inputs\n", + "!mkdir inputs/img\n", + "!mkdir inputs/cloth" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "K8hYM6XqCnxC" + }, + "outputs": [], + "source": [ + "# Get pose model\n", + "\n", + "%cd pose\n", + "!wget https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/pose_deploy_linevec.prototxt\n", + "!wget https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/pose_iter_440000.caffemodel\n", + "%cd .." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "HbGDB31KrKHV" + }, + "outputs": [], + "source": [ + "# Get parser and segmentation model\n", + "\n", + "!git clone https://github.com/hasibzunair/Self-Correction-Human-Parsing-for-ACGPN.git\n", + "!git clone https://github.com/hasibzunair/U-2-Net.git" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "3hSJI347rZtQ" + }, + "outputs": [], + "source": [ + "# For segmentation mask generation\n", + "\n", + "!wget https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/lip_final.pth" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "ooARXEZZYnGD" + }, + "outputs": [], + "source": [ + "# Get U-2-Net weights\n", + "\n", + "%cd U-2-Net\n", + "!mkdir saved_models\n", + "!mkdir saved_models/u2net\n", + "!mkdir saved_models/u2netp\n", + "!wget -P saved_models/u2netp/ https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/u2netp.pth\n", + "!wget -P saved_models/u2net/ https://github.com/hasibzunair/fifa-demo/releases/download/v1.0/u2net.pth\n", + "import u2net_load\n", + "import u2net_run\n", + "u2net = u2net_load.model(model_name = 'u2netp')\n", + "%cd .." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "r1VknOqswSTW" + }, + "outputs": [], + "source": [ + "# Get model checkpoints\n", + "\n", + "!mkdir checkpoints\n", + "%cd checkpoints\n", + "!wget https://github.com/hasibzunair/vton-demo/releases/download/v1.0/decavtonfifapretrain.zip\n", + "!unzip decavtonfifapretrain.zip\n", + "%cd .." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "eD-DMczQthkd" + }, + "source": [ + "### Please upload your cloth image below" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "eAMmTiQ7zibP" + }, + "outputs": [], + "source": [ + "%cd inputs/cloth\n", + "from google.colab import files\n", + "uploaded = files.upload()\n", + "%cd ..\n", + "%cd .." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dQP04cGctrOW" + }, + "source": [ + "### Please upload your person image below" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "_wmUOR92170v" + }, + "outputs": [], + "source": [ + "%cd inputs/img\n", + "from google.colab import files\n", + "uploaded = files.upload()\n", + "%cd ..\n", + "%cd .." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "u-zkuJWVWgrX" + }, + "source": [ + "### Preprocessing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Tm-9Up6z0Zpg" + }, + "outputs": [], + "source": [ + "cloth_name = 'cloth.png'\n", + "cloth_path = os.path.join('inputs/cloth', sorted(os.listdir('inputs/cloth'))[0])\n", + "cloth = Image.open(cloth_path)\n", + "\n", + "# Resize cloth image\n", + "cloth = ImageOps.fit(cloth, (192, 256), Image.BICUBIC).convert(\"RGB\")\n", + "\n", + "# Save resized cloth image\n", + "cloth.save(os.path.join('Data_preprocessing/test_color', cloth_name))\n", + "\n", + "# 1. Get binary mask for clothing image\n", + "u2net_run.infer(u2net, 'Data_preprocessing/test_color', 'Data_preprocessing/test_edge')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Y3aHah45D655" + }, + "outputs": [], + "source": [ + "import time\n", + "\n", + "start_time = time.time()\n", + "# Person image\n", + "img_name = 'person.png'\n", + "img_path = os.path.join('inputs/img', sorted(os.listdir('inputs/img'))[0])\n", + "img = Image.open(img_path)\n", + "img = ImageOps.fit(img, (192, 256), Image.BICUBIC).convert(\"RGB\")\n", + "img_path = os.path.join('Data_preprocessing/test_img', img_name)\n", + "img.save(img_path)\n", + "resize_time = time.time()\n", + "print('Resized image in {}s'.format(resize_time-start_time))\n", + "\n", + "# 2. Get parsed person image (test_label), uses person image\n", + "!python3 Self-Correction-Human-Parsing-for-ACGPN/simple_extractor.py --dataset 'lip' --model-restore 'lip_final.pth' --input-dir 'Data_preprocessing/test_img' --output-dir 'Data_preprocessing/test_label'\n", + "parse_time = time.time()\n", + "print('Parsing generated in {}s'.format(parse_time-resize_time))\n", + "\n", + "# 3. 
Get pose map from person image\n", + "pose_path = os.path.join('Data_preprocessing/test_pose', img_name.replace('.png', '_keypoints.json'))\n", + "generate_pose_keypoints(img_path, pose_path)\n", + "pose_time = time.time()\n", + "print('Pose map generated in {}s'.format(pose_time-parse_time))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "EgMi912KAUNs" + }, + "outputs": [], + "source": [ + "!rm -rf Data_preprocessing/test_pairs.txt\n", + "\n", + "# Format: person, cloth image\n", + "with open('Data_preprocessing/test_pairs.txt','w') as f:\n", + " f.write('person.png cloth.png')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NTfjVq44X9dg" + }, + "source": [ + "### Run inference" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "EIcoP4ll14Ia" + }, + "outputs": [], + "source": [ + "# Run test.py using the preferred configuration (e.g. changes in architecture etc.)\n", + "!python test.py --name decavtonfifapretrain" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0IfZakpL1xX9" + }, + "source": [ + "### Show results" + ] + }, + { + "cell_type": "code", + "source": [ + "# See 'results' directory\n", + "output_grid = np.concatenate([np.array(Image.open('Data_preprocessing/test_img/person.png')),\n", + " np.array(Image.open('Data_preprocessing/test_color/cloth.png')),\n", + " np.array(Image.open('results/test/try-on/person.png'))], axis=1)\n", + "image_grid = Image.fromarray(output_grid)\n", + "image_grid" + ], + "metadata": { + "id": "Jm2ZSQ3xMW1G" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "V_zzoKdCOTNn" + }, + "source": [ + "**To try a new person and cloth pair, remove current files and start again by uploading a new cloth image!**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "9b-Fx2TyKbZ9" + }, + "outputs": [], + "source": [ + "!rm -rf Data_preprocessing/test_color/*\n", + "!rm -rf Data_preprocessing/test_colormask/*\n", + "!rm -rf Data_preprocessing/test_edge/*\n", + "!rm -rf Data_preprocessing/test_img/*\n", + "!rm -rf Data_preprocessing/test_label/*\n", + "!rm -rf Data_preprocessing/test_mask/*\n", + "!rm -rf Data_preprocessing/test_pose/*\n", + "!rm -rf inputs/cloth/*\n", + "!rm -rf inputs/img/*\n", + "!rm -rf results/*\n", + "%cd /content/fifa-demo" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "0wpSoOskH2qq" + }, + "outputs": [], + "source": [ + "" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [ + "7WJtP2PfBcPN", + "eD-DMczQthkd", + "dQP04cGctrOW", + "u-zkuJWVWgrX", + "NTfjVq44X9dg", + "0IfZakpL1xX9" + ], + "name": "demo.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/examples/1/cloth.jpg b/examples/1/cloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5354be48374c3a2efd7be2dddc904597542420ca Binary files /dev/null and b/examples/1/cloth.jpg differ diff --git a/examples/1/person.jpg b/examples/1/person.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..0e623627bea9bf1f3cfb82e16d49cb8b1a13ba94 Binary files /dev/null and b/examples/1/person.jpg differ diff --git a/examples/2/cloth.jpg b/examples/2/cloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..748c7d839f90a244d1cf76da3e88da4a3b350399 Binary files /dev/null and b/examples/2/cloth.jpg differ diff --git a/examples/2/person.jpg b/examples/2/person.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2cdd2d4666d0f0859ffb58bba256e8b813d747ff Binary files /dev/null and b/examples/2/person.jpg differ diff --git a/gradio/demo.py b/gradio/demo.py new file mode 100644 index 0000000000000000000000000000000000000000..2ad81ef24cdb3e645331aacae729fd20cec78082 --- /dev/null +++ b/gradio/demo.py @@ -0,0 +1,37 @@ +import cv2 +import paddlehub as hub +import gradio as gr +import torch + +# Images +torch.hub.download_url_to_file('https://cdn.pixabay.com/photo/2018/08/12/16/59/ara-3601194_1280.jpg', 'parrot.jpg') +torch.hub.download_url_to_file('https://cdn.pixabay.com/photo/2016/10/21/14/46/fox-1758183_1280.jpg', 'fox.jpg') + +model = hub.Module(name='U2Net') + +def infer(img): + result = model.Segmentation( + images=[cv2.imread(img.name)], + paths=None, + batch_size=1, + input_size=320, + output_dir='output', + visualization=True) + return result[0]['front'][:,:,::-1], result[0]['mask'] + +inputs = gr.inputs.Image(type='file', label="Original Image") +outputs = [ + gr.outputs.Image(type="numpy",label="Front"), + gr.outputs.Image(type="numpy",label="Mask") + ] + +title = "U^2-Net" +description = "demo for U^2-Net. To use it, simply upload your image, or click one of the examples to load them. Read more at the links below." +article = "
U^2-Net: Going Deeper with Nested U-Structure for Salient Object Detection | Github Repo
" + +examples = [ + ['fox.jpg'], + ['parrot.jpg'] +] + +gr.Interface(infer, inputs, outputs, title=title, description=description, article=article, examples=examples).launch() \ No newline at end of file diff --git a/grid_sample.py b/grid_sample.py new file mode 100644 index 0000000000000000000000000000000000000000..d5b603e0751e36c52f5cb668231ada3e27dfb995 --- /dev/null +++ b/grid_sample.py @@ -0,0 +1,15 @@ +# encoding: utf-8 + +import torch.nn.functional as F +from torch.autograd import Variable + + +def grid_sample(input, grid, canvas=None): + output = F.grid_sample(input, grid) + if canvas is None: + return output + else: + input_mask = Variable(input.data.new(input.size()).fill_(1)) + output_mask = F.grid_sample(input_mask, grid) + padded_output = output * output_mask + canvas * (1 - output_mask) + return padded_output diff --git a/inputs/cloth/cloth.png b/inputs/cloth/cloth.png new file mode 100644 index 0000000000000000000000000000000000000000..ee9c8bc30337fd1c4f184bfa57ec55bcfe4d2edf Binary files /dev/null and b/inputs/cloth/cloth.png differ diff --git a/inputs/img/person.png b/inputs/img/person.png new file mode 100644 index 0000000000000000000000000000000000000000..8ece7e389d2770f71ce8bcb5f8b2f1a4215f6327 Binary files /dev/null and b/inputs/img/person.png differ diff --git a/model/__init__.py b/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4d8fa272fb03208e17723b0269eb579b81514540 --- /dev/null +++ b/model/__init__.py @@ -0,0 +1,2 @@ +from .u2net import U2NET +from .u2net import U2NETP diff --git a/model/u2net.py b/model/u2net.py new file mode 100644 index 0000000000000000000000000000000000000000..5b85f138f3af4e2ceae1ff07dee514c859a831af --- /dev/null +++ b/model/u2net.py @@ -0,0 +1,525 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +class REBNCONV(nn.Module): + def __init__(self,in_ch=3,out_ch=3,dirate=1): + super(REBNCONV,self).__init__() + + self.conv_s1 = nn.Conv2d(in_ch,out_ch,3,padding=1*dirate,dilation=1*dirate) + self.bn_s1 = nn.BatchNorm2d(out_ch) + self.relu_s1 = nn.ReLU(inplace=True) + + def forward(self,x): + + hx = x + xout = self.relu_s1(self.bn_s1(self.conv_s1(hx))) + + return xout + +## upsample tensor 'src' to have the same spatial size with tensor 'tar' +def _upsample_like(src,tar): + + src = F.upsample(src,size=tar.shape[2:],mode='bilinear') + + return src + + +### RSU-7 ### +class RSU7(nn.Module):#UNet07DRES(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU7,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool4 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool5 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv6 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv7 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv6d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv5d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv1d = 
REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + hx = self.pool3(hx3) + + hx4 = self.rebnconv4(hx) + hx = self.pool4(hx4) + + hx5 = self.rebnconv5(hx) + hx = self.pool5(hx5) + + hx6 = self.rebnconv6(hx) + + hx7 = self.rebnconv7(hx6) + + hx6d = self.rebnconv6d(torch.cat((hx7,hx6),1)) + hx6dup = _upsample_like(hx6d,hx5) + + hx5d = self.rebnconv5d(torch.cat((hx6dup,hx5),1)) + hx5dup = _upsample_like(hx5d,hx4) + + hx4d = self.rebnconv4d(torch.cat((hx5dup,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + +### RSU-6 ### +class RSU6(nn.Module):#UNet06DRES(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU6,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool4 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv6 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv5d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + hx = self.pool3(hx3) + + hx4 = self.rebnconv4(hx) + hx = self.pool4(hx4) + + hx5 = self.rebnconv5(hx) + + hx6 = self.rebnconv6(hx5) + + + hx5d = self.rebnconv5d(torch.cat((hx6,hx5),1)) + hx5dup = _upsample_like(hx5d,hx4) + + hx4d = self.rebnconv4d(torch.cat((hx5dup,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + +### RSU-5 ### +class RSU5(nn.Module):#UNet05DRES(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU5,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool3 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv5 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv4d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + 
self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + hx = self.pool3(hx3) + + hx4 = self.rebnconv4(hx) + + hx5 = self.rebnconv5(hx4) + + hx4d = self.rebnconv4d(torch.cat((hx5,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + +### RSU-4 ### +class RSU4(nn.Module):#UNet04DRES(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU4,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.pool1 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=1) + self.pool2 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=1) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=2) + + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=1) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx = self.pool1(hx1) + + hx2 = self.rebnconv2(hx) + hx = self.pool2(hx2) + + hx3 = self.rebnconv3(hx) + + hx4 = self.rebnconv4(hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.rebnconv2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.rebnconv1d(torch.cat((hx2dup,hx1),1)) + + return hx1d + hxin + +### RSU-4F ### +class RSU4F(nn.Module):#UNet04FRES(nn.Module): + + def __init__(self, in_ch=3, mid_ch=12, out_ch=3): + super(RSU4F,self).__init__() + + self.rebnconvin = REBNCONV(in_ch,out_ch,dirate=1) + + self.rebnconv1 = REBNCONV(out_ch,mid_ch,dirate=1) + self.rebnconv2 = REBNCONV(mid_ch,mid_ch,dirate=2) + self.rebnconv3 = REBNCONV(mid_ch,mid_ch,dirate=4) + + self.rebnconv4 = REBNCONV(mid_ch,mid_ch,dirate=8) + + self.rebnconv3d = REBNCONV(mid_ch*2,mid_ch,dirate=4) + self.rebnconv2d = REBNCONV(mid_ch*2,mid_ch,dirate=2) + self.rebnconv1d = REBNCONV(mid_ch*2,out_ch,dirate=1) + + def forward(self,x): + + hx = x + + hxin = self.rebnconvin(hx) + + hx1 = self.rebnconv1(hxin) + hx2 = self.rebnconv2(hx1) + hx3 = self.rebnconv3(hx2) + + hx4 = self.rebnconv4(hx3) + + hx3d = self.rebnconv3d(torch.cat((hx4,hx3),1)) + hx2d = self.rebnconv2d(torch.cat((hx3d,hx2),1)) + hx1d = self.rebnconv1d(torch.cat((hx2d,hx1),1)) + + return hx1d + hxin + + +##### U^2-Net #### +class U2NET(nn.Module): + + def __init__(self,in_ch=3,out_ch=1): + super(U2NET,self).__init__() + + self.stage1 = RSU7(in_ch,32,64) + self.pool12 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage2 = RSU6(64,32,128) + self.pool23 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage3 = RSU5(128,64,256) + self.pool34 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage4 = RSU4(256,128,512) + self.pool45 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage5 = RSU4F(512,256,512) + self.pool56 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage6 = RSU4F(512,256,512) + + # decoder + self.stage5d = RSU4F(1024,256,512) + self.stage4d = RSU4(1024,128,256) + self.stage3d = RSU5(512,64,128) + 
self.stage2d = RSU6(256,32,64) + self.stage1d = RSU7(128,16,64) + + self.side1 = nn.Conv2d(64,out_ch,3,padding=1) + self.side2 = nn.Conv2d(64,out_ch,3,padding=1) + self.side3 = nn.Conv2d(128,out_ch,3,padding=1) + self.side4 = nn.Conv2d(256,out_ch,3,padding=1) + self.side5 = nn.Conv2d(512,out_ch,3,padding=1) + self.side6 = nn.Conv2d(512,out_ch,3,padding=1) + + self.outconv = nn.Conv2d(6*out_ch,out_ch,1) + + def forward(self,x): + + hx = x + + #stage 1 + hx1 = self.stage1(hx) + hx = self.pool12(hx1) + + #stage 2 + hx2 = self.stage2(hx) + hx = self.pool23(hx2) + + #stage 3 + hx3 = self.stage3(hx) + hx = self.pool34(hx3) + + #stage 4 + hx4 = self.stage4(hx) + hx = self.pool45(hx4) + + #stage 5 + hx5 = self.stage5(hx) + hx = self.pool56(hx5) + + #stage 6 + hx6 = self.stage6(hx) + hx6up = _upsample_like(hx6,hx5) + + #-------------------- decoder -------------------- + hx5d = self.stage5d(torch.cat((hx6up,hx5),1)) + hx5dup = _upsample_like(hx5d,hx4) + + hx4d = self.stage4d(torch.cat((hx5dup,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.stage3d(torch.cat((hx4dup,hx3),1)) + hx3dup = _upsample_like(hx3d,hx2) + + hx2d = self.stage2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.stage1d(torch.cat((hx2dup,hx1),1)) + + + #side output + d1 = self.side1(hx1d) + + d2 = self.side2(hx2d) + d2 = _upsample_like(d2,d1) + + d3 = self.side3(hx3d) + d3 = _upsample_like(d3,d1) + + d4 = self.side4(hx4d) + d4 = _upsample_like(d4,d1) + + d5 = self.side5(hx5d) + d5 = _upsample_like(d5,d1) + + d6 = self.side6(hx6) + d6 = _upsample_like(d6,d1) + + d0 = self.outconv(torch.cat((d1,d2,d3,d4,d5,d6),1)) + + return F.sigmoid(d0), F.sigmoid(d1), F.sigmoid(d2), F.sigmoid(d3), F.sigmoid(d4), F.sigmoid(d5), F.sigmoid(d6) + +### U^2-Net small ### +class U2NETP(nn.Module): + + def __init__(self,in_ch=3,out_ch=1): + super(U2NETP,self).__init__() + + self.stage1 = RSU7(in_ch,16,64) + self.pool12 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage2 = RSU6(64,16,64) + self.pool23 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage3 = RSU5(64,16,64) + self.pool34 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage4 = RSU4(64,16,64) + self.pool45 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage5 = RSU4F(64,16,64) + self.pool56 = nn.MaxPool2d(2,stride=2,ceil_mode=True) + + self.stage6 = RSU4F(64,16,64) + + # decoder + self.stage5d = RSU4F(128,16,64) + self.stage4d = RSU4(128,16,64) + self.stage3d = RSU5(128,16,64) + self.stage2d = RSU6(128,16,64) + self.stage1d = RSU7(128,16,64) + + self.side1 = nn.Conv2d(64,out_ch,3,padding=1) + self.side2 = nn.Conv2d(64,out_ch,3,padding=1) + self.side3 = nn.Conv2d(64,out_ch,3,padding=1) + self.side4 = nn.Conv2d(64,out_ch,3,padding=1) + self.side5 = nn.Conv2d(64,out_ch,3,padding=1) + self.side6 = nn.Conv2d(64,out_ch,3,padding=1) + + self.outconv = nn.Conv2d(6*out_ch,out_ch,1) + + def forward(self,x): + + hx = x + + #stage 1 + hx1 = self.stage1(hx) + hx = self.pool12(hx1) + + #stage 2 + hx2 = self.stage2(hx) + hx = self.pool23(hx2) + + #stage 3 + hx3 = self.stage3(hx) + hx = self.pool34(hx3) + + #stage 4 + hx4 = self.stage4(hx) + hx = self.pool45(hx4) + + #stage 5 + hx5 = self.stage5(hx) + hx = self.pool56(hx5) + + #stage 6 + hx6 = self.stage6(hx) + hx6up = _upsample_like(hx6,hx5) + + #decoder + hx5d = self.stage5d(torch.cat((hx6up,hx5),1)) + hx5dup = _upsample_like(hx5d,hx4) + + hx4d = self.stage4d(torch.cat((hx5dup,hx4),1)) + hx4dup = _upsample_like(hx4d,hx3) + + hx3d = self.stage3d(torch.cat((hx4dup,hx3),1)) + hx3dup = 
_upsample_like(hx3d,hx2) + + hx2d = self.stage2d(torch.cat((hx3dup,hx2),1)) + hx2dup = _upsample_like(hx2d,hx1) + + hx1d = self.stage1d(torch.cat((hx2dup,hx1),1)) + + + #side output + d1 = self.side1(hx1d) + + d2 = self.side2(hx2d) + d2 = _upsample_like(d2,d1) + + d3 = self.side3(hx3d) + d3 = _upsample_like(d3,d1) + + d4 = self.side4(hx4d) + d4 = _upsample_like(d4,d1) + + d5 = self.side5(hx5d) + d5 = _upsample_like(d5,d1) + + d6 = self.side6(hx6) + d6 = _upsample_like(d6,d1) + + d0 = self.outconv(torch.cat((d1,d2,d3,d4,d5,d6),1)) + + return F.sigmoid(d0), F.sigmoid(d1), F.sigmoid(d2), F.sigmoid(d3), F.sigmoid(d4), F.sigmoid(d5), F.sigmoid(d6) diff --git a/model/u2net_refactor.py b/model/u2net_refactor.py new file mode 100644 index 0000000000000000000000000000000000000000..e668de2c2bc67cbef280eaa5f789c762c4745fa4 --- /dev/null +++ b/model/u2net_refactor.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn + +import math + +__all__ = ['U2NET_full', 'U2NET_lite'] + + +def _upsample_like(x, size): + return nn.Upsample(size=size, mode='bilinear', align_corners=False)(x) + + +def _size_map(x, height): + # {height: size} for Upsample + size = list(x.shape[-2:]) + sizes = {} + for h in range(1, height): + sizes[h] = size + size = [math.ceil(w / 2) for w in size] + return sizes + + +class REBNCONV(nn.Module): + def __init__(self, in_ch=3, out_ch=3, dilate=1): + super(REBNCONV, self).__init__() + + self.conv_s1 = nn.Conv2d(in_ch, out_ch, 3, padding=1 * dilate, dilation=1 * dilate) + self.bn_s1 = nn.BatchNorm2d(out_ch) + self.relu_s1 = nn.ReLU(inplace=True) + + def forward(self, x): + return self.relu_s1(self.bn_s1(self.conv_s1(x))) + + +class RSU(nn.Module): + def __init__(self, name, height, in_ch, mid_ch, out_ch, dilated=False): + super(RSU, self).__init__() + self.name = name + self.height = height + self.dilated = dilated + self._make_layers(height, in_ch, mid_ch, out_ch, dilated) + + def forward(self, x): + sizes = _size_map(x, self.height) + x = self.rebnconvin(x) + + # U-Net like symmetric encoder-decoder structure + def unet(x, height=1): + if height < self.height: + x1 = getattr(self, f'rebnconv{height}')(x) + if not self.dilated and height < self.height - 1: + x2 = unet(getattr(self, 'downsample')(x1), height + 1) + else: + x2 = unet(x1, height + 1) + + x = getattr(self, f'rebnconv{height}d')(torch.cat((x2, x1), 1)) + return _upsample_like(x, sizes[height - 1]) if not self.dilated and height > 1 else x + else: + return getattr(self, f'rebnconv{height}')(x) + + return x + unet(x) + + def _make_layers(self, height, in_ch, mid_ch, out_ch, dilated=False): + self.add_module('rebnconvin', REBNCONV(in_ch, out_ch)) + self.add_module('downsample', nn.MaxPool2d(2, stride=2, ceil_mode=True)) + + self.add_module(f'rebnconv1', REBNCONV(out_ch, mid_ch)) + self.add_module(f'rebnconv1d', REBNCONV(mid_ch * 2, out_ch)) + + for i in range(2, height): + dilate = 1 if not dilated else 2 ** (i - 1) + self.add_module(f'rebnconv{i}', REBNCONV(mid_ch, mid_ch, dilate=dilate)) + self.add_module(f'rebnconv{i}d', REBNCONV(mid_ch * 2, mid_ch, dilate=dilate)) + + dilate = 2 if not dilated else 2 ** (height - 1) + self.add_module(f'rebnconv{height}', REBNCONV(mid_ch, mid_ch, dilate=dilate)) + + +class U2NET(nn.Module): + def __init__(self, cfgs, out_ch): + super(U2NET, self).__init__() + self.out_ch = out_ch + self._make_layers(cfgs) + + def forward(self, x): + sizes = _size_map(x, self.height) + maps = [] # storage for maps + + # side saliency map + def unet(x, height=1): + if height < 6: + x1 = getattr(self, 
f'stage{height}')(x) + x2 = unet(getattr(self, 'downsample')(x1), height + 1) + x = getattr(self, f'stage{height}d')(torch.cat((x2, x1), 1)) + side(x, height) + return _upsample_like(x, sizes[height - 1]) if height > 1 else x + else: + x = getattr(self, f'stage{height}')(x) + side(x, height) + return _upsample_like(x, sizes[height - 1]) + + def side(x, h): + # side output saliency map (before sigmoid) + x = getattr(self, f'side{h}')(x) + x = _upsample_like(x, sizes[1]) + maps.append(x) + + def fuse(): + # fuse saliency probability maps + maps.reverse() + x = torch.cat(maps, 1) + x = getattr(self, 'outconv')(x) + maps.insert(0, x) + return [torch.sigmoid(x) for x in maps] + + unet(x) + maps = fuse() + return maps + + def _make_layers(self, cfgs): + self.height = int((len(cfgs) + 1) / 2) + self.add_module('downsample', nn.MaxPool2d(2, stride=2, ceil_mode=True)) + for k, v in cfgs.items(): + # build rsu block + self.add_module(k, RSU(v[0], *v[1])) + if v[2] > 0: + # build side layer + self.add_module(f'side{v[0][-1]}', nn.Conv2d(v[2], self.out_ch, 3, padding=1)) + # build fuse layer + self.add_module('outconv', nn.Conv2d(int(self.height * self.out_ch), self.out_ch, 1)) + + +def U2NET_full(): + full = { + # cfgs for building RSUs and sides + # {stage : [name, (height(L), in_ch, mid_ch, out_ch, dilated), side]} + 'stage1': ['En_1', (7, 3, 32, 64), -1], + 'stage2': ['En_2', (6, 64, 32, 128), -1], + 'stage3': ['En_3', (5, 128, 64, 256), -1], + 'stage4': ['En_4', (4, 256, 128, 512), -1], + 'stage5': ['En_5', (4, 512, 256, 512, True), -1], + 'stage6': ['En_6', (4, 512, 256, 512, True), 512], + 'stage5d': ['De_5', (4, 1024, 256, 512, True), 512], + 'stage4d': ['De_4', (4, 1024, 128, 256), 256], + 'stage3d': ['De_3', (5, 512, 64, 128), 128], + 'stage2d': ['De_2', (6, 256, 32, 64), 64], + 'stage1d': ['De_1', (7, 128, 16, 64), 64], + } + return U2NET(cfgs=full, out_ch=1) + + +def U2NET_lite(): + lite = { + # cfgs for building RSUs and sides + # {stage : [name, (height(L), in_ch, mid_ch, out_ch, dilated), side]} + 'stage1': ['En_1', (7, 3, 16, 64), -1], + 'stage2': ['En_2', (6, 64, 16, 64), -1], + 'stage3': ['En_3', (5, 64, 16, 64), -1], + 'stage4': ['En_4', (4, 64, 16, 64), -1], + 'stage5': ['En_5', (4, 64, 16, 64, True), -1], + 'stage6': ['En_6', (4, 64, 16, 64, True), 64], + 'stage5d': ['De_5', (4, 128, 16, 64, True), 64], + 'stage4d': ['De_4', (4, 128, 16, 64), 64], + 'stage3d': ['De_3', (5, 128, 16, 64), 64], + 'stage2d': ['De_2', (6, 128, 16, 64), 64], + 'stage1d': ['De_1', (7, 128, 16, 64), 64], + } + return U2NET(cfgs=lite, out_ch=1) diff --git a/models/__init__.py b/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8a3f782535e343701ca598947ed76cdcc491d2ea --- /dev/null +++ b/models/__init__.py @@ -0,0 +1 @@ +# model_init diff --git a/models/base_model.py b/models/base_model.py new file mode 100644 index 0000000000000000000000000000000000000000..0741c88eddc652ed54308422667982d5b793a0f3 --- /dev/null +++ b/models/base_model.py @@ -0,0 +1,95 @@ +# Copyright (C) 2017 NVIDIA Corporation. All rights reserved. +# Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode). 
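+# Note (comment added for clarity, not part of the original NVIDIA source): BaseModel
+# centralises checkpoint I/O for the pix2pixHD-style models in this package.
+# save_network()/load_network() build filenames as '%s_net_%s.pth' % (epoch_label,
+# network_label) under os.path.join(opt.checkpoints_dir, opt.name), e.g. (hypothetically)
+# 'checkpoints/decavtonfifapretrain/latest_net_G.pth'. A missing generator ('G')
+# checkpoint is treated as fatal, while other missing networks are skipped after a warning.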
+import os +import torch +import sys + + +class BaseModel(torch.nn.Module): + def name(self): + return 'BaseModel' + + def initialize(self, opt): + self.opt = opt + self.gpu_ids = opt.gpu_ids + self.isTrain = opt.isTrain + self.Tensor = torch.cuda.FloatTensor if self.gpu_ids else torch.Tensor + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) + + def set_input(self, input): + self.input = input + + def forward(self): + pass + + # used in test time, no backprop + def test(self): + pass + + def get_image_paths(self): + pass + + def optimize_parameters(self): + pass + + def get_current_visuals(self): + return self.input + + def get_current_errors(self): + return {} + + def save(self, label): + pass + + # helper saving function that can be used by subclasses + def save_network(self, network, network_label, epoch_label, gpu_ids): + save_filename = '%s_net_%s.pth' % (epoch_label, network_label) + save_path = os.path.join(self.save_dir, save_filename) + torch.save(network.state_dict(), save_path) + # if len(gpu_ids) and torch.cuda.is_available(): + # network.cuda() + + # helper loading function that can be used by subclasses + def load_network(self, network, network_label, epoch_label, save_dir=''): + save_filename = '%s_net_%s.pth' % (epoch_label, network_label) + print(save_filename) + if not save_dir: + save_dir = self.save_dir + save_path = os.path.join(save_dir, save_filename) + if not os.path.isfile(save_path): + print('%s not exists yet!' % save_path) + if network_label == 'G': + raise('Generator must exist!') + else: + # network.load_state_dict(torch.load(save_path)) + + network.load_state_dict(torch.load(save_path)) + # except: + # pretrained_dict = torch.load(save_path) + # model_dict = network.state_dict() + # try: + # pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict} + # network.load_state_dict(pretrained_dict) + # if self.opt.verbose: + # print('Pretrained network %s has excessive layers; Only loading layers that are used' % network_label) + # except: + # print('Pretrained network %s has fewer layers; The following are not initialized:' % network_label) + # for k, v in pretrained_dict.items(): + # if v.size() == model_dict[k].size(): + # model_dict[k] = v + # + # if sys.version_info >= (3,0): + # not_initialized = set() + # else: + # from sets import Set + # not_initialized = Set() + # + # for k, v in model_dict.items(): + # if k not in pretrained_dict or v.size() != pretrained_dict[k].size(): + # not_initialized.add(k.split('.')[0]) + # + # print(sorted(not_initialized)) + # network.load_state_dict(model_dict) + + def update_learning_rate(): + pass diff --git a/models/mnist_model.py b/models/mnist_model.py new file mode 100644 index 0000000000000000000000000000000000000000..a1bcca6bc10fbe4ea1ef27258fd079e528dee9a9 --- /dev/null +++ b/models/mnist_model.py @@ -0,0 +1,122 @@ +# encoding: utf-8 + +import math +import torch +import itertools +import numpy as np +import torch.nn as nn +import torch.nn.functional as F +from grid_sample import grid_sample +from torch.autograd import Variable +from tps_grid_gen import TPSGridGen + + +class CNN(nn.Module): + def __init__(self, num_output): + super(CNN, self).__init__() + self.conv1 = nn.Conv2d(1, 10, kernel_size=5) + self.conv2 = nn.Conv2d(10, 20, kernel_size=5) + self.conv2_drop = nn.Dropout2d() + self.fc1 = nn.Linear(320, 50) + self.fc2 = nn.Linear(50, num_output) + + def forward(self, x): + x = F.relu(F.max_pool2d(self.conv1(x), 2)) + x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2)) + x 
= x.view(-1, 320) + x = F.relu(self.fc1(x)) + x = F.dropout(x, training=self.training) + x = self.fc2(x) + return x + + +class ClsNet(nn.Module): + + def __init__(self): + super(ClsNet, self).__init__() + self.cnn = CNN(10) + + def forward(self, x): + return F.log_softmax(self.cnn(x)) + + +class BoundedGridLocNet(nn.Module): + + def __init__(self, grid_height, grid_width, target_control_points): + super(BoundedGridLocNet, self).__init__() + self.cnn = CNN(grid_height * grid_width * 2) + + bias = torch.from_numpy(np.arctanh(target_control_points.numpy())) + bias = bias.view(-1) + self.cnn.fc2.bias.data.copy_(bias) + self.cnn.fc2.weight.data.zero_() + + def forward(self, x): + batch_size = x.size(0) + points = F.tanh(self.cnn(x)) + return points.view(batch_size, -1, 2) + + +class UnBoundedGridLocNet(nn.Module): + + def __init__(self, grid_height, grid_width, target_control_points): + super(UnBoundedGridLocNet, self).__init__() + self.cnn = CNN(grid_height * grid_width * 2) + + bias = target_control_points.view(-1) + self.cnn.fc2.bias.data.copy_(bias) + self.cnn.fc2.weight.data.zero_() + + def forward(self, x): + batch_size = x.size(0) + points = self.cnn(x) + return points.view(batch_size, -1, 2) + + +class STNClsNet(nn.Module): + + def __init__(self, args): + super(STNClsNet, self).__init__() + self.args = args + + r1 = args.span_range_height + r2 = args.span_range_width + assert r1 < 1 and r2 < 1 # if >= 1, arctanh will cause error in BoundedGridLocNet + target_control_points = torch.Tensor(list(itertools.product( + np.arange(-r1, r1 + 0.00001, 2.0 * r1 / (args.grid_height - 1)), + np.arange(-r2, r2 + 0.00001, 2.0 * r2 / (args.grid_width - 1)), + ))) + Y, X = target_control_points.split(1, dim=1) + target_control_points = torch.cat([X, Y], dim=1) + + GridLocNet = { + 'unbounded_stn': UnBoundedGridLocNet, + 'bounded_stn': BoundedGridLocNet, + }[args.model] + self.loc_net = GridLocNet( + args.grid_height, args.grid_width, target_control_points) + + self.tps = TPSGridGen( + args.image_height, args.image_width, target_control_points) + + self.cls_net = ClsNet() + + def forward(self, x): + batch_size = x.size(0) + source_control_points = self.loc_net(x) + source_coordinate = self.tps(source_control_points) + grid = source_coordinate.view( + batch_size, self.args.image_height, self.args.image_width, 2) + transformed_x = grid_sample(x, grid) + logit = self.cls_net(transformed_x) + return logit + + +def get_model(args): + if args.model == 'no_stn': + print('create model without STN') + model = ClsNet() + else: + print('create model with STN') + model = STNClsNet(args) + return model diff --git a/models/mnist_train.py b/models/mnist_train.py new file mode 100644 index 0000000000000000000000000000000000000000..5d859d07e87e6fbb0f2e3266335fea37042fac00 --- /dev/null +++ b/models/mnist_train.py @@ -0,0 +1,113 @@ +# encoding: utf-8 + +import os +import torch +import random +import argparse +import mnist_model +import data_loader +import torch.nn as nn +import torch.optim as optim +import torch.nn.functional as F +from torch.autograd import Variable + +# Training settings +parser = argparse.ArgumentParser() +parser.add_argument('--batch-size', type=int, default=64) +parser.add_argument('--test-batch-size', type=int, default=1000) +parser.add_argument('--epochs', type=int, default=10) +parser.add_argument('--lr', type=float, default=0.01) +parser.add_argument('--momentum', type=float, default=0.5) +parser.add_argument('--no-cuda', action='store_true', default=False) +parser.add_argument('--seed', 
type=int, default=1) +parser.add_argument('--log-interval', type=int, default=10) +parser.add_argument('--save-interval', type=int, default=100) +parser.add_argument('--model', required=True) +parser.add_argument('--angle', type=int, default=60) +parser.add_argument('--span_range', type=int, default=0.9) +parser.add_argument('--grid_size', type=int, default=4) +args = parser.parse_args() +args.cuda = not args.no_cuda and torch.cuda.is_available() + +args.span_range_height = args.span_range_width = args.span_range +args.grid_height = args.grid_width = args.grid_size +args.image_height = args.image_width = 28 + +torch.manual_seed(args.seed) +if args.cuda: + torch.cuda.manual_seed(args.seed) + +model = mnist_model.get_model(args) +if args.cuda: + model.cuda() + +optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum) +train_loader = data_loader.get_train_loader(args) +test_loader = data_loader.get_test_loader(args) + + +def train(epoch): + model.train() + for batch_idx, (data, target) in enumerate(train_loader): + if args.cuda: + data, target = data.cuda(), target.cuda() + # print(data.shape) + data, target = Variable(data), Variable(target) + optimizer.zero_grad() + output = model(data) + loss = F.nll_loss(output, target) + loss.backward() + optimizer.step() + if batch_idx % args.log_interval == 0: + print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( + epoch, batch_idx * len(data), len(train_loader.dataset), + 100. * batch_idx / len(train_loader), loss.data)) + if batch_idx % args.save_interval == 0: + checkpoint_path = checkpoint_dir + \ + 'epoch%03d_iter%03d.pth' % (epoch, batch_idx) + torch.save(model.cpu().state_dict(), checkpoint_path) + if args.cuda: + model.cuda() + + +def test(epoch): + model.eval() + test_loss = 0 + correct = 0 + for data, target in test_loader: + if args.cuda: + data, target = data.cuda(), target.cuda() + data, target = Variable(data, volatile=True), Variable(target) + output = model(data) + test_loss += F.nll_loss(output, target).data + # get the index of the max log-probability + pred = output.data.max(1)[1] + correct += pred.eq(target.data).cpu().sum() + + test_loss = test_loss + # loss function already averages over batch size + test_loss /= len(test_loader) + accuracy = 100. * correct / len(test_loader.dataset) + print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.02f}%)\n'.format( + test_loss, correct, len(test_loader.dataset), accuracy, + )) + log_file.write('{:.02f}\n'.format(accuracy)) + log_file.flush() + os.fsync(log_file) + + +checkpoint_dir = 'checkpoint/%s_angle%d_grid%d/' % ( + args.model, args.angle, args.grid_size, +) +if not os.path.isdir(checkpoint_dir): + os.makedirs(checkpoint_dir) +if not os.path.isdir('accuracy_log'): + os.makedirs('accuracy_log') +log_file_path = 'accuracy_log/%s_angle%d_grid%d.txt' % ( + args.model, args.angle, args.grid_size, +) + +with open(log_file_path, 'w') as log_file: + for epoch in range(1, args.epochs + 1): + train(epoch) + test(epoch) diff --git a/models/models.py b/models/models.py new file mode 100644 index 0000000000000000000000000000000000000000..507144261ccd890685196f1c5f75378f2973c004 --- /dev/null +++ b/models/models.py @@ -0,0 +1,23 @@ +# Copyright (C) 2017 NVIDIA Corporation. All rights reserved. +# Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode). 
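+# Note (comment added for clarity, not part of the original NVIDIA source): create_model()
+# is the factory the train/test entry points are expected to call. For opt.model ==
+# 'pix2pixHD' it builds Pix2PixHDModel when opt.isTrain is set and the lighter
+# InferenceModel otherwise, calls model.initialize(opt), and wraps the training model in
+# torch.nn.DataParallel when opt.gpu_ids is non-empty. A minimal, hypothetical call:
+#     from models.models import create_model
+#     model = create_model(opt)   # opt produced by the repo's option parser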
+import torch +#import ipdb + + +def create_model(opt): + if opt.model == 'pix2pixHD': + from .pix2pixHD_model import Pix2PixHDModel, InferenceModel + if opt.isTrain: + model = Pix2PixHDModel() + # ipdb.set_trace() + else: + model = InferenceModel() + + model.initialize(opt) + if opt.verbose: + print("model [%s] was created" % (model.name())) + + if opt.isTrain and len(opt.gpu_ids): + model = torch.nn.DataParallel(model, device_ids=opt.gpu_ids) + + return model diff --git a/models/networks.py b/models/networks.py new file mode 100644 index 0000000000000000000000000000000000000000..d2c86bc137f372b289df75b6e9213ea4b6c6a98d --- /dev/null +++ b/models/networks.py @@ -0,0 +1,1776 @@ +import torch +import os +import torch.nn as nn +import functools +from torch.autograd import Variable +import numpy as np +import torch.nn.functional as F +import math +import torch +import itertools +import numpy as np +import torch.nn as nn +import torch.nn.functional as F +from grid_sample import grid_sample +from torch.autograd import Variable +from tps_grid_gen import TPSGridGen + + +############################################################################### +# Functions +############################################################################### +def weights_init(m): + classname = m.__class__.__name__ + if classname.find('Conv2d') != -1: + m.weight.data.normal_(0.0, 0.02) + elif classname.find('BatchNorm2d') != -1: + m.weight.data.normal_(1.0, 0.02) + m.bias.data.fill_(0) + + +def get_norm_layer(norm_type='instance'): + if norm_type == 'batch': + norm_layer = functools.partial(nn.BatchNorm2d, affine=True) + elif norm_type == 'instance': + norm_layer = functools.partial(nn.InstanceNorm2d, affine=False) + else: + raise NotImplementedError('normalization layer [%s] is not found' % norm_type) + return norm_layer + + +def define_G(input_nc, output_nc, ngf, netG, L=1, S=1, n_downsample_global=3, n_blocks_global=9, n_local_enhancers=1, + n_blocks_local=3, norm='instance', gpu_ids=[]): + norm_layer = get_norm_layer(norm_type=norm) + if netG == 'global': + netG = GlobalGenerator(input_nc, output_nc, L, S, ngf, n_downsample_global, n_blocks_global, norm_layer) + elif netG == 'local': + netG = LocalEnhancer(input_nc, output_nc, ngf, n_downsample_global, n_blocks_global, + n_local_enhancers, n_blocks_local, norm_layer) + else: + raise ('generator not implemented!') + print(netG) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + + +def define_Unet(input_nc, gpu_ids=[]): + netG = Unet(input_nc) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + + +def define_UnetMask(input_nc, gpu_ids=[]): + netG = UnetMask(input_nc,output_nc=4) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + +def define_Refine(input_nc, output_nc, gpu_ids=[]): + netG = Refine(input_nc, output_nc) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + +#################################################### +def define_Refine_ResUnet(input_nc, output_nc, gpu_ids=[]): + #ipdb.set_trace() + netG = Refine_ResUnet_New(input_nc, output_nc) #norm_layer=nn.InstanceNorm2d + #ipdb.set_trace() + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG +#################################################### + +def define_D(input_nc, ndf, n_layers_D, norm='instance', use_sigmoid=False, num_D=1, getIntermFeat=False, gpu_ids=[]): + norm_layer = get_norm_layer(norm_type=norm) + netD = MultiscaleDiscriminator(input_nc, ndf, n_layers_D, 
norm_layer, use_sigmoid, num_D, getIntermFeat) + print(netD) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netD.cuda(gpu_ids[0]) + netD.apply(weights_init) + return netD + + +def define_VAE(input_nc, gpu_ids=[]): + netVAE = VAE(19, 32, 32, 1024) + print(netVAE) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netVAE.cuda(gpu_ids[0]) + return netVAE + + +def define_B(input_nc, output_nc, ngf, n_downsample_global=3, n_blocks_global=3, norm='instance', gpu_ids=[]): + norm_layer = get_norm_layer(norm_type=norm) + netB = BlendGenerator(input_nc, output_nc, ngf, n_downsample_global, n_blocks_global, norm_layer) + print(netB) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netB.cuda(gpu_ids[0]) + netB.apply(weights_init) + return netB + + +def define_partial_enc(input_nc, gpu_ids=[]): + net = PartialConvEncoder(input_nc) + print(net) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + net.cuda(gpu_ids[0]) + net.apply(weights_init) + return net + + +def define_conv_enc(input_nc, gpu_ids=[]): + net = ConvEncoder(input_nc) + print(net) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + net.cuda(gpu_ids[0]) + net.apply(weights_init) + return net + + +def define_AttG(output_nc, gpu_ids=[]): + net = AttGenerator(output_nc) + print(net) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + net.cuda(gpu_ids[0]) + net.apply(weights_init) + return net + + +def print_network(net): + if isinstance(net, list): + net = net[0] + num_params = 0 + for param in net.parameters(): + num_params += param.numel() + print(net) + print('Total number of parameters: %d' % num_params) + + +############################################################################## +# Losses +############################################################################## +class GANLoss(nn.Module): + def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0, + tensor=torch.FloatTensor): + super(GANLoss, self).__init__() + self.real_label = target_real_label + self.fake_label = target_fake_label + self.real_label_var = None + self.fake_label_var = None + self.Tensor = tensor + if use_lsgan: + self.loss = nn.MSELoss() + else: + self.loss = nn.BCELoss() + + def get_target_tensor(self, input, target_is_real): + target_tensor = None + if target_is_real: + create_label = ((self.real_label_var is None) or + (self.real_label_var.numel() != input.numel())) + if create_label: + real_tensor = self.Tensor(input.size()).fill_(self.real_label) + self.real_label_var = Variable(real_tensor, requires_grad=False) + target_tensor = self.real_label_var + else: + create_label = ((self.fake_label_var is None) or + (self.fake_label_var.numel() != input.numel())) + if create_label: + fake_tensor = self.Tensor(input.size()).fill_(self.fake_label) + self.fake_label_var = Variable(fake_tensor, requires_grad=False) + target_tensor = self.fake_label_var + return target_tensor + + def __call__(self, input, target_is_real): + if isinstance(input[0], list): + loss = 0 + for input_i in input: + pred = input_i[-1] + target_tensor = self.get_target_tensor(pred, target_is_real) + loss += self.loss(pred, target_tensor) + return loss + else: + target_tensor = self.get_target_tensor(input[-1], target_is_real) + return self.loss(input[-1], target_tensor) + + +class VGGLossWarp(nn.Module): + def __init__(self, gpu_ids): + super(VGGLossWarp, self).__init__() + self.vgg = Vgg19().cuda() + self.criterion = nn.L1Loss() + self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + + def 
forward(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + loss += self.weights[4] * self.criterion(x_vgg[4], y_vgg[4].detach()) + return loss + + +class VGGLoss(nn.Module): + def __init__(self, gpu_ids): + super(VGGLoss, self).__init__() + self.vgg = Vgg19().cuda() + self.criterion = nn.L1Loss() + self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + + def forward(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + for i in range(len(x_vgg)): + loss += self.weights[i] * self.criterion(x_vgg[i], y_vgg[i].detach()) + return loss + + def warp(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + loss += self.weights[4] * self.criterion(x_vgg[4], y_vgg[4].detach()) + return loss + + +class StyleLoss(nn.Module): + def __init__(self, gpu_ids): + super(StyleLoss, self).__init__() + self.vgg = Vgg19().cuda() + self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + + def forward(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + for i in range(len(x_vgg)): + N, C, H, W = x_vgg[i].shape + for n in range(N): + phi_x = x_vgg[i][n] + phi_y = y_vgg[i][n] + phi_x = phi_x.reshape(C, H * W) + phi_y = phi_y.reshape(C, H * W) + G_x = torch.matmul(phi_x, phi_x.t()) / (C * H * W) + G_y = torch.matmul(phi_y, phi_y.t()) / (C * H * W) + loss += torch.sqrt(torch.mean((G_x - G_y) ** 2)) * self.weights[i] + return loss + + +############################################################################## +# Generator +############################################################################## + +class PartialConvEncoder(nn.Module): + def __init__(self, input_nc, ngf=32, norm_layer=nn.BatchNorm2d): + super(PartialConvEncoder, self).__init__() + activation = nn.ReLU(True) + self.pad1 = nn.ReflectionPad2d(3) + self.partial_conv1 = PartialConv(input_nc, ngf, kernel_size=7) + self.norm_layer1 = norm_layer(ngf) + self.activation = activation + ##down sample + mult = 2 ** 0 + self.down1 = PartialConv(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer2 = norm_layer(ngf * mult * 2) + mult = 2 ** 1 + self.down2 = PartialConv(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer3 = norm_layer(ngf * mult * 2) + + mult = 2 ** 2 + self.down3 = PartialConv(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer4 = norm_layer(ngf * mult * 2) + + mult = 2 ** 3 + self.down4 = PartialConv(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer5 = norm_layer(ngf * mult * 2) + + def forward(self, input, mask): + input = self.pad1(input) + mask = self.pad1(mask) + input, mask = self.partial_conv1(input, mask) + input = self.norm_layer1(input) + input = self.activation(input) + + input, mask = self.down1(input, mask) + input = self.norm_layer2(input) + input = self.activation(input) + input, mask = self.down2(input, mask) + input = self.norm_layer3(input) + input = self.activation(input) + input, mask = self.down3(input, mask) + input = self.norm_layer4(input) + input = self.activation(input) + input, mask = self.down4(input, mask) + input = self.norm_layer5(input) + input = self.activation(input) + return input + + +class ConvEncoder(nn.Module): + def __init__(self, input_nc, ngf=32, n_downsampling=4, n_blocks=4, norm_layer=nn.BatchNorm2d, + padding_type='reflect'): + super(ConvEncoder, self).__init__() + activation = nn.ReLU(True) + # print("input_nc",input_nc) + model = [nn.ReflectionPad2d(3), nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0), 
norm_layer(ngf), activation] + ### downsample + for i in range(n_downsampling): + stride = 2 + + mult = 2 ** i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=stride, padding=1), + norm_layer(ngf * mult * 2), activation] + self.model = nn.Sequential(*model) + + def forward(self, input): + return self.model(input) + + +class AttGenerator(nn.Module): + def __init__(self, output_nc, ngf=32, n_blocks=4, n_downsampling=4, padding_type='reflect'): + super(AttGenerator, self).__init__() + mult = 2 ** n_downsampling + model = [] + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult * 2, norm_type='in', padding_type=padding_type)] + + self.model = nn.Sequential(*model) + self.upsampling = [] + self.out_channels = [] + self.AttNorm = [] + ##upsampling + norm_layer = nn.BatchNorm2d + activation = nn.ReLU(True) + + for i in range(n_downsampling): + mult = 2 ** (n_downsampling - i) + up_module = [nn.ConvTranspose2d(ngf * mult * 2, int(ngf * mult / 2) * 2, kernel_size=3, stride=2, padding=1, + output_padding=1), + norm_layer(int(ngf * mult / 2) * 2), activation + ] + up_module = nn.Sequential(*up_module) + self.upsampling += [up_module] + self.out_channels += [int(ngf * mult / 2) * 2] + self.upsampling = nn.Sequential(*self.upsampling) + + # + self.AttNorm += [AttentionNorm(5, self.out_channels[0], 2, 4)] + self.AttNorm += [AttentionNorm(5, self.out_channels[1], 2, 2)] + self.AttNorm += [AttentionNorm(5, self.out_channels[2], 1, 2)] + self.AttNorm += [AttentionNorm(5, self.out_channels[3], 1, 1)] + self.AttNorm = nn.Sequential(*self.AttNorm) + self.last_conv = [nn.ReflectionPad2d(3), nn.Conv2d(ngf * 2, output_nc, kernel_size=7, padding=0), nn.Tanh()] + self.last_conv = nn.Sequential(*self.last_conv) + + def forward(self, input, unattended): + up = self.model(unattended) + for i in range(4): + # print(i) + up = self.upsampling[i](up) + if i == 3: + break; + up = self.AttNorm[i](input, up) + return self.last_conv(up) + + +class PartialConv(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride=1, + padding=0, dilation=1, groups=1, bias=True): + super(PartialConv, self).__init__() + self.input_conv = nn.Conv2d(in_channels, out_channels, kernel_size, + stride, padding, dilation, groups, bias) + self.mask_conv = nn.Conv2d(in_channels, out_channels, kernel_size, + stride, padding, dilation, groups, False) + self.input_conv.apply(weights_init) + + torch.nn.init.constant_(self.mask_conv.weight, 1.0) + + # mask is not updated + for param in self.mask_conv.parameters(): + param.requires_grad = False + + def forward(self, input, mask): + # http://masc.cs.gmu.edu/wiki/partialconv + # C(X) = W^T * X + b, C(0) = b, D(M) = 1 * M + 0 = sum(M) + # W^T* (M .* X) / sum(M) + b = [C(M .* X) – C(0)] / D(M) + C(0) + + output = self.input_conv(input * mask) + if self.input_conv.bias is not None: + output_bias = self.input_conv.bias.view(1, -1, 1, 1).expand_as( + output) + else: + output_bias = torch.zeros_like(output) + + with torch.no_grad(): + output_mask = self.mask_conv(mask) + + no_update_holes = output_mask == 0 + mask_sum = output_mask.masked_fill_(no_update_holes, 1.0) + + output_pre = (output - output_bias) / mask_sum + output_bias + output = output_pre.masked_fill_(no_update_holes, 0.0) + + new_mask = torch.ones_like(output) + new_mask = new_mask.masked_fill_(no_update_holes, 0.0) + + return output, new_mask + + +class AttentionNorm(nn.Module): + def __init__(self, ref_channels, out_channels, first_rate, second_rate): + super(AttentionNorm, self).__init__() + 
self.first = first_rate + self.second = second_rate + mid_channels = int(out_channels / 2) + self.conv_1time_f = nn.Conv2d(ref_channels, mid_channels, kernel_size=3, stride=1, padding=1) + self.conv_2times_f = nn.Conv2d(ref_channels, mid_channels, kernel_size=3, stride=2, padding=1) + self.conv_4times_f = nn.Conv2d(ref_channels, mid_channels, kernel_size=3, stride=4, padding=1) + + self.conv_1time_s = nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=1, padding=1) + self.conv_2times_s = nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=2, padding=1) + self.conv_4times_s = nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=4, padding=1) + + self.conv_1time_m = nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=1, padding=1) + self.conv_2times_m = nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=2, padding=1) + self.conv_4times_m = nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=4, padding=1) + self.norm = nn.BatchNorm2d(out_channels) + self.conv = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1) + + def forward(self, input, unattended): + # attention weights + # print(input.shape,unattended.shape) + if self.first == 1: + input = self.conv_1time_f(input) + elif self.first == 2: + input = self.conv_2times_f(input) + elif self.first == 4: + input = self.conv_4times_f(input) + mask = None + if self.second == 1: + bias = self.conv_1time_s(input) + mask = self.conv_1time_m(input) + elif self.second == 2: + bias = self.conv_2times_s(input) + mask = self.conv_2times_m(input) + elif self.second == 4: + bias = self.conv_4times_s(input) + mask = self.conv_4times_m(input) + mask = torch.sigmoid(mask) + attended = self.norm(unattended) + # print(attended.shape,mask.shape,bias.shape) + attended = attended * mask + bias + attended = torch.relu(attended) + attended = self.conv(attended) + output = attended + unattended + return output +class UnetMask(nn.Module): + def __init__(self, input_nc, output_nc=3): + super(UnetMask, self).__init__() + self.stn = STNNet() + nl = nn.InstanceNorm2d + self.conv1 = nn.Sequential(*[nn.Conv2d(input_nc, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU()]) + self.pool1 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv2 = nn.Sequential(*[nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + self.pool2 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv3 = nn.Sequential(*[nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + self.pool3 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv4 = nn.Sequential(*[nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.drop4 = nn.Dropout(0.5) + self.pool4 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv5 = nn.Sequential(*[nn.Conv2d(512, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU(), + nn.Conv2d(1024, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU()]) + self.drop5 = nn.Dropout(0.5) + + self.up6 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), + nn.ReLU()]) + + self.conv6 = nn.Sequential(*[nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), 
nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.up7 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), + nn.ReLU()]) + self.conv7 = nn.Sequential(*[nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + + self.up8 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), + nn.ReLU()]) + + self.conv8 = nn.Sequential(*[nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + + self.up9 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), + nn.ReLU()]) + + self.conv9 = nn.Sequential(*[nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, output_nc, kernel_size=3, stride=1, padding=1) + ]) + + def forward(self, input, refer, mask,grid): + + + input, warped_mask,rx,ry,cx,cy,grid = self.stn(input, torch.cat([mask, refer, input], 1), mask,grid) + # print(input.shape) + + + conv1 = self.conv1(torch.cat([refer.detach(), input.detach()], 1)) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9, input, warped_mask,grid + +class Unet(nn.Module): + def __init__(self, input_nc, output_nc=3): + super(Unet, self).__init__() + self.stn = STNNet() + nl = nn.InstanceNorm2d + self.conv1 = nn.Sequential(*[nn.Conv2d(input_nc, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU()]) + self.pool1 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv2 = nn.Sequential(*[nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + self.pool2 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv3 = nn.Sequential(*[nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + self.pool3 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv4 = nn.Sequential(*[nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.drop4 = nn.Dropout(0.5) + self.pool4 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv5 = nn.Sequential(*[nn.Conv2d(512, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU(), + nn.Conv2d(1024, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU()]) + self.drop5 = nn.Dropout(0.5) + + self.up6 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), 
+ nn.ReLU()]) + + self.conv6 = nn.Sequential(*[nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.up7 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), + nn.ReLU()]) + self.conv7 = nn.Sequential(*[nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + + self.up8 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), + nn.ReLU()]) + + self.conv8 = nn.Sequential(*[nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + + self.up9 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), + nn.ReLU()]) + + self.conv9 = nn.Sequential(*[nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, output_nc, kernel_size=3, stride=1, padding=1) + ]) + + def forward(self, input, refer, mask): + input, warped_mask,rx,ry,cx,cy = self.stn(input, torch.cat([mask, refer, input], 1), mask) + # print(input.shape) + + conv1 = self.conv1(torch.cat([refer.detach(), input.detach()], 1)) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9, input, warped_mask,rx,ry,cx,cy + + def refine(self, input): + conv1 = self.conv1(input) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9 + + +class Refine(nn.Module): + def __init__(self, input_nc, output_nc=3): + super(Refine, self).__init__() + nl = nn.InstanceNorm2d + self.conv1 = nn.Sequential(*[nn.Conv2d(input_nc, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU()]) + self.pool1 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv2 = nn.Sequential(*[nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + self.pool2 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv3 = nn.Sequential(*[nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 
256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + self.pool3 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv4 = nn.Sequential(*[nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.drop4 = nn.Dropout(0.5) + self.pool4 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv5 = nn.Sequential(*[nn.Conv2d(512, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU(), + nn.Conv2d(1024, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU()]) + self.drop5 = nn.Dropout(0.5) + + self.up6 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), + nn.ReLU()]) + + self.conv6 = nn.Sequential(*[nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.up7 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), + nn.ReLU()]) + self.conv7 = nn.Sequential(*[nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + + self.up8 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), + nn.ReLU()]) + + self.conv8 = nn.Sequential(*[nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + + self.up9 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), + nn.ReLU()]) + + self.conv9 = nn.Sequential(*[nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, output_nc, kernel_size=3, stride=1, padding=1) + ]) + + def refine(self, input): + conv1 = self.conv1(input) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9 + + +###### ResUnet new +class ResidualBlock(nn.Module): + def __init__(self, in_features=64, norm_layer=nn.BatchNorm2d): + super(ResidualBlock, self).__init__() + self.relu = nn.ReLU(True) + if norm_layer == None: + self.block = nn.Sequential( + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + nn.ReLU(inplace=True), + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + ) + else: + self.block = nn.Sequential( + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + norm_layer(in_features), + nn.ReLU(inplace=True), + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + norm_layer(in_features) + ) + + def forward(self, x): + residual = x + out = self.block(x) + out += residual + out = self.relu(out) + return out + + +class Refine_ResUnet_New(nn.Module): + def __init__(self, input_nc, output_nc, num_downs=5, 
ngf=32, + norm_layer=nn.BatchNorm2d, use_dropout=False): + super(Refine_ResUnet_New, self).__init__() + # construct unet structure + unet_block = ResUnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + + for i in range(num_downs - 5): + unet_block = ResUnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = ResUnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = ResUnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = ResUnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = ResUnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + + self.model = unet_block + + def refine(self, input): + return self.model(input) + + +# Defines the submodule with skip connection. +# X -------------------identity---------------------- X +# |-- downsampling -- |submodule| -- upsampling --| +class ResUnetSkipConnectionBlock(nn.Module): + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(ResUnetSkipConnectionBlock, self).__init__() + self.outermost = outermost + use_bias = norm_layer == nn.InstanceNorm2d + + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=3, + stride=2, padding=1, bias=use_bias) + # add two resblock + res_downconv = [ResidualBlock(inner_nc, norm_layer), ResidualBlock(inner_nc, norm_layer)] + res_upconv = [ResidualBlock(outer_nc, norm_layer), ResidualBlock(outer_nc, norm_layer)] + + downrelu = nn.ReLU(True) + uprelu = nn.ReLU(True) + if norm_layer != None: + downnorm = norm_layer(inner_nc) + upnorm = norm_layer(outer_nc) + + if outermost: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + down = [downconv, downrelu] + res_downconv + up = [upsample, upconv] + model = down + [submodule] + up + elif innermost: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + down = [downconv, downrelu] + res_downconv + if norm_layer == None: + up = [upsample, upconv, uprelu] + res_upconv + else: + up = [upsample, upconv, upnorm, uprelu] + res_upconv + model = down + up + else: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc*2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + if norm_layer == None: + down = [downconv, downrelu] + res_downconv + up = [upsample, upconv, uprelu] + res_upconv + else: + down = [downconv, downnorm, downrelu] + res_downconv + up = [upsample, upconv, upnorm, uprelu] + res_upconv + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: + return torch.cat([x, self.model(x)], 1) +################## + + +class GlobalGenerator(nn.Module): + def __init__(self, input_nc, output_nc, L, S, ngf=64, n_downsampling=3, n_blocks=9, norm_layer=nn.BatchNorm2d, + padding_type='reflect'): + assert (n_blocks >= 0) + 
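+        # Encoder -> n_blocks AdaIN residual blocks -> decoder. The AdaIN
+        # scale/shift parameters are not learned directly: enc_style predicts
+        # one vector per reference image (guided by enc_label features through
+        # SFT layers) and assign_adain_params() writes it into the
+        # AdaptiveInstanceNorm2d layers before each forward pass.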
super(GlobalGenerator, self).__init__() + activation = nn.ReLU(True) + + model = [nn.ReflectionPad2d(3), nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0), norm_layer(ngf), activation] + ### downsample + for i in range(n_downsampling): + mult = 2 ** i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1), + norm_layer(ngf * mult * 2), activation] + + ### resnet blocks + mult = 2 ** n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, norm_type='adain', padding_type=padding_type)] + ### upsample + for i in range(n_downsampling): + mult = 2 ** (n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=2, padding=1, + output_padding=1), + norm_layer(int(ngf * mult / 2)), activation] + model += [nn.ReflectionPad2d(3), nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)] + self.model = nn.Sequential(*model) + + # style encoder + self.enc_style = StyleEncoder(5, S, 16, self.get_num_adain_params(self.model), norm='none', activ='relu', + pad_type='reflect') + # label encoder + self.enc_label = LabelEncoder(5, L, 16, 64, norm='none', activ='relu', pad_type='reflect') + + def assign_adain_params(self, adain_params, model): + # assign the adain_params to the AdaIN layers in model + for m in model.modules(): + if m.__class__.__name__ == "AdaptiveInstanceNorm2d": + mean = adain_params[:, :m.num_features] + std = adain_params[:, m.num_features:2 * m.num_features] + m.bias = mean.contiguous().view(-1) + m.weight = std.contiguous().view(-1) + if adain_params.size(1) > 2 * m.num_features: + adain_params = adain_params[:, 2 * m.num_features:] + + def get_num_adain_params(self, model): + # return the number of AdaIN parameters needed by the model + num_adain_params = 0 + for m in model.modules(): + if m.__class__.__name__ == "AdaptiveInstanceNorm2d": + num_adain_params += 2 * m.num_features + return num_adain_params + + def forward(self, input, input_ref, image_ref): + fea1, fea2 = self.enc_label(input_ref) + adain_params = self.enc_style((image_ref, fea1, fea2)) + self.assign_adain_params(adain_params, self.model) + return self.model(input) + + +class BlendGenerator(nn.Module): + def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=3, n_blocks=3, norm_layer=nn.BatchNorm2d, + padding_type='reflect'): + assert (n_blocks >= 0) + super(BlendGenerator, self).__init__() + activation = nn.ReLU(True) + + model = [nn.ReflectionPad2d(3), nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0), norm_layer(ngf), activation] + ### downsample + for i in range(n_downsampling): + mult = 2 ** i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1), + norm_layer(ngf * mult * 2), activation] + + ### resnet blocks + mult = 2 ** n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, norm_type='in', padding_type=padding_type)] + + ### upsample + for i in range(n_downsampling): + mult = 2 ** (n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=2, padding=1, + output_padding=1), + norm_layer(int(ngf * mult / 2)), activation] + model += [nn.ReflectionPad2d(3), nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0), nn.Sigmoid()] + self.model = nn.Sequential(*model) + + def forward(self, input1, input2): + m = self.model(torch.cat([input1, input2], 1)) + return input1 * m + input2 * (1 - m), m + + # Define the Multiscale Discriminator. 
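+# num_D PatchGAN discriminators (NLayerDiscriminator below) are applied to an
+# image pyramid built with average pooling, so each one judges the input at a
+# different resolution. With getIntermFeat=True every scale also returns its
+# intermediate activations (useful for a feature-matching loss); otherwise each
+# scale returns a one-element list holding its patch logits, e.g.
+#
+#     netD = MultiscaleDiscriminator(input_nc=3, num_D=3)
+#     preds = netD(img)   # num_D entries; preds[i][-1] is the patch map of scale i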
+ + +class MultiscaleDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, + use_sigmoid=False, num_D=3, getIntermFeat=False): + super(MultiscaleDiscriminator, self).__init__() + self.num_D = num_D + self.n_layers = n_layers + self.getIntermFeat = getIntermFeat + + for i in range(num_D): + netD = NLayerDiscriminator(input_nc, ndf, n_layers, norm_layer, use_sigmoid, getIntermFeat) + if getIntermFeat: + for j in range(n_layers + 2): + setattr(self, 'scale' + str(i) + '_layer' + str(j), getattr(netD, 'model' + str(j))) + else: + setattr(self, 'layer' + str(i), netD.model) + + self.downsample = nn.AvgPool2d(3, stride=2, padding=[1, 1], count_include_pad=False) + + def singleD_forward(self, model, input): + if self.getIntermFeat: + result = [input] + for i in range(len(model)): + result.append(model[i](result[-1])) + return result[1:] + else: + return [model(input)] + + def forward(self, input): + num_D = self.num_D + result = [] + input_downsampled = input + for i in range(num_D): + if self.getIntermFeat: + model = [getattr(self, 'scale' + str(num_D - 1 - i) + '_layer' + str(j)) for j in + range(self.n_layers + 2)] + else: + model = getattr(self, 'layer' + str(num_D - 1 - i)) + result.append(self.singleD_forward(model, input_downsampled)) + if i != (num_D - 1): + input_downsampled = self.downsample(input_downsampled) + return result + + +# Define the PatchGAN discriminator with the specified arguments. +class NLayerDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, use_sigmoid=False, getIntermFeat=False): + super(NLayerDiscriminator, self).__init__() + self.getIntermFeat = getIntermFeat + self.n_layers = n_layers + + kw = 4 + padw = int(np.ceil((kw - 1.0) / 2)) + sequence = [[nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), nn.LeakyReLU(0.2, True)]] + + nf = ndf + for n in range(1, n_layers): + nf_prev = nf + nf = min(nf * 2, 512) + sequence += [[ + nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=2, padding=padw), + norm_layer(nf), nn.LeakyReLU(0.2, True) + ]] + + nf_prev = nf + nf = min(nf * 2, 512) + sequence += [[ + nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=1, padding=padw), + norm_layer(nf), + nn.LeakyReLU(0.2, True) + ]] + + sequence += [[nn.Conv2d(nf, 1, kernel_size=kw, stride=1, padding=padw)]] + + if use_sigmoid: + sequence += [[nn.Sigmoid()]] + + if getIntermFeat: + for n in range(len(sequence)): + setattr(self, 'model' + str(n), nn.Sequential(*sequence[n])) + else: + sequence_stream = [] + for n in range(len(sequence)): + sequence_stream += sequence[n] + self.model = nn.Sequential(*sequence_stream) + + def forward(self, input): + if self.getIntermFeat: + res = [input] + for n in range(self.n_layers + 2): + model = getattr(self, 'model' + str(n)) + res.append(model(res[-1])) + return res[1:] + else: + return self.model(input) + + +from torchvision import models + + +class Vgg19(torch.nn.Module): + def __init__(self, requires_grad=False): + super(Vgg19, self).__init__() + vgg = models.vgg19(pretrained=False) + vgg_pretrained_features = vgg.features + self.vgg = vgg + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + self.slice5 = torch.nn.Sequential() + for x in range(2): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(2, 7): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(7, 12): + 
self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(12, 21): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + for x in range(21, 30): + self.slice5.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + h_relu1 = self.slice1(X) + h_relu2 = self.slice2(h_relu1) + h_relu3 = self.slice3(h_relu2) + h_relu4 = self.slice4(h_relu3) + h_relu5 = self.slice5(h_relu4) + out = [h_relu1, h_relu2, h_relu3, h_relu4, h_relu5] + return out + + def extract(self, x): + x = self.vgg.features(x) + x = self.vgg.avgpool(x) + return x + + +# Define the MaskVAE +class VAE(nn.Module): + def __init__(self, nc, ngf, ndf, latent_variable_size): + super(VAE, self).__init__() + # self.cuda = True + self.nc = nc + self.ngf = ngf + self.ndf = ndf + self.latent_variable_size = latent_variable_size + + # encoder + self.e1 = nn.Conv2d(nc, ndf, 4, 2, 1) + self.bn1 = nn.BatchNorm2d(ndf) + + self.e2 = nn.Conv2d(ndf, ndf * 2, 4, 2, 1) + self.bn2 = nn.BatchNorm2d(ndf * 2) + + self.e3 = nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1) + self.bn3 = nn.BatchNorm2d(ndf * 4) + + self.e4 = nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1) + self.bn4 = nn.BatchNorm2d(ndf * 8) + + self.e5 = nn.Conv2d(ndf * 8, ndf * 16, 4, 2, 1) + self.bn5 = nn.BatchNorm2d(ndf * 16) + + self.e6 = nn.Conv2d(ndf * 16, ndf * 32, 4, 2, 1) + self.bn6 = nn.BatchNorm2d(ndf * 32) + + self.e7 = nn.Conv2d(ndf * 32, ndf * 64, 4, 2, 1) + self.bn7 = nn.BatchNorm2d(ndf * 64) + + self.fc1 = nn.Linear(ndf * 64 * 4 * 4, latent_variable_size) + self.fc2 = nn.Linear(ndf * 64 * 4 * 4, latent_variable_size) + + # decoder + self.d1 = nn.Linear(latent_variable_size, ngf * 64 * 4 * 4) + + self.up1 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd1 = nn.ReplicationPad2d(1) + self.d2 = nn.Conv2d(ngf * 64, ngf * 32, 3, 1) + self.bn8 = nn.BatchNorm2d(ngf * 32, 1.e-3) + + self.up2 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd2 = nn.ReplicationPad2d(1) + self.d3 = nn.Conv2d(ngf * 32, ngf * 16, 3, 1) + self.bn9 = nn.BatchNorm2d(ngf * 16, 1.e-3) + + self.up3 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd3 = nn.ReplicationPad2d(1) + self.d4 = nn.Conv2d(ngf * 16, ngf * 8, 3, 1) + self.bn10 = nn.BatchNorm2d(ngf * 8, 1.e-3) + + self.up4 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd4 = nn.ReplicationPad2d(1) + self.d5 = nn.Conv2d(ngf * 8, ngf * 4, 3, 1) + self.bn11 = nn.BatchNorm2d(ngf * 4, 1.e-3) + + self.up5 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd5 = nn.ReplicationPad2d(1) + self.d6 = nn.Conv2d(ngf * 4, ngf * 2, 3, 1) + self.bn12 = nn.BatchNorm2d(ngf * 2, 1.e-3) + + self.up6 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd6 = nn.ReplicationPad2d(1) + self.d7 = nn.Conv2d(ngf * 2, ngf, 3, 1) + self.bn13 = nn.BatchNorm2d(ngf, 1.e-3) + + self.up7 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd7 = nn.ReplicationPad2d(1) + self.d8 = nn.Conv2d(ngf, nc, 3, 1) + + self.leakyrelu = nn.LeakyReLU(0.2) + self.relu = nn.ReLU() + # self.sigmoid = nn.Sigmoid() + self.maxpool = nn.MaxPool2d((2, 2), (2, 2)) + + def encode(self, x): + h1 = self.leakyrelu(self.bn1(self.e1(x))) + h2 = self.leakyrelu(self.bn2(self.e2(h1))) + h3 = self.leakyrelu(self.bn3(self.e3(h2))) + h4 = self.leakyrelu(self.bn4(self.e4(h3))) + h5 = self.leakyrelu(self.bn5(self.e5(h4))) + h6 = self.leakyrelu(self.bn6(self.e6(h5))) + h7 = self.leakyrelu(self.bn7(self.e7(h6))) + h7 = h7.view(-1, self.ndf * 64 * 4 * 4) + return self.fc1(h7), self.fc2(h7) + + def reparametrize(self, mu, logvar): + 
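+        # Reparameterization trick: z = mu + sigma * eps with eps ~ N(0, I)
+        # and sigma = exp(0.5 * logvar), so sampling stays differentiable
+        # w.r.t. mu and logvar. The noise below is drawn with
+        # torch.cuda.FloatTensor, so this path is CUDA-only (the commented
+        # lines are the CPU fallback).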
std = logvar.mul(0.5).exp_() + # if self.cuda: + eps = torch.cuda.FloatTensor(std.size()).normal_() + # else: + # eps = torch.FloatTensor(std.size()).normal_() + eps = Variable(eps) + return eps.mul(std).add_(mu) + + def decode(self, z): + h1 = self.relu(self.d1(z)) + h1 = h1.view(-1, self.ngf * 64, 4, 4) + h2 = self.leakyrelu(self.bn8(self.d2(self.pd1(self.up1(h1))))) + h3 = self.leakyrelu(self.bn9(self.d3(self.pd2(self.up2(h2))))) + h4 = self.leakyrelu(self.bn10(self.d4(self.pd3(self.up3(h3))))) + h5 = self.leakyrelu(self.bn11(self.d5(self.pd4(self.up4(h4))))) + h6 = self.leakyrelu(self.bn12(self.d6(self.pd5(self.up5(h5))))) + h7 = self.leakyrelu(self.bn13(self.d7(self.pd6(self.up6(h6))))) + return self.d8(self.pd7(self.up7(h7))) + + def get_latent_var(self, x): + mu, logvar = self.encode(x) + z = self.reparametrize(mu, logvar) + return z, mu, logvar.mul(0.5).exp_() + + def forward(self, x): + mu, logvar = self.encode(x) + z = self.reparametrize(mu, logvar) + res = self.decode(z) + + return res, x, mu, logvar + + +# style encode part +class StyleEncoder(nn.Module): + def __init__(self, n_downsample, input_dim, dim, style_dim, norm, activ, pad_type): + super(StyleEncoder, self).__init__() + self.model = [] + self.model_middle = [] + self.model_last = [] + self.model += [ConvBlock(input_dim, dim, 7, 1, 3, norm=norm, activation=activ, pad_type=pad_type)] + for i in range(2): + self.model += [ConvBlock(dim, 2 * dim, 4, 2, 1, norm=norm, activation=activ, pad_type=pad_type)] + dim *= 2 + for i in range(n_downsample - 2): + self.model_middle += [ConvBlock(dim, dim, 4, 2, 1, norm=norm, activation=activ, pad_type=pad_type)] + self.model_last += [nn.AdaptiveAvgPool2d(1)] # global average pooling + self.model_last += [nn.Conv2d(dim, style_dim, 1, 1, 0)] + + self.model = nn.Sequential(*self.model) + self.model_middle = nn.Sequential(*self.model_middle) + self.model_last = nn.Sequential(*self.model_last) + + self.output_dim = dim + + self.sft1 = SFTLayer() + self.sft2 = SFTLayer() + + def forward(self, x): + fea = self.model(x[0]) + fea = self.sft1((fea, x[1])) + fea = self.model_middle(fea) + fea = self.sft2((fea, x[2])) + return self.model_last(fea) + + +# label encode part +class LabelEncoder(nn.Module): + def __init__(self, n_downsample, input_dim, dim, style_dim, norm, activ, pad_type): + super(LabelEncoder, self).__init__() + self.model = [] + self.model_last = [nn.ReLU()] + self.model += [ConvBlock(input_dim, dim, 7, 1, 3, norm=norm, activation=activ, pad_type=pad_type)] + self.model += [ConvBlock(dim, 2 * dim, 4, 2, 1, norm=norm, activation=activ, pad_type=pad_type)] + dim *= 2 + self.model += [ConvBlock(dim, 2 * dim, 4, 2, 1, norm=norm, activation='none', pad_type=pad_type)] + dim *= 2 + for i in range(n_downsample - 3): + self.model_last += [ConvBlock(dim, dim, 4, 2, 1, norm=norm, activation=activ, pad_type=pad_type)] + self.model_last += [ConvBlock(dim, dim, 4, 2, 1, norm=norm, activation='none', pad_type=pad_type)] + self.model = nn.Sequential(*self.model) + self.model_last = nn.Sequential(*self.model_last) + self.output_dim = dim + + def forward(self, x): + fea = self.model(x) + return fea, self.model_last(fea) + + +# Define the basic block +class ConvBlock(nn.Module): + def __init__(self, input_dim, output_dim, kernel_size, stride, + padding=0, norm='none', activation='relu', pad_type='zero'): + super(ConvBlock, self).__init__() + self.use_bias = True + # initialize padding + if pad_type == 'reflect': + self.pad = nn.ReflectionPad2d(padding) + elif pad_type == 'replicate': + self.pad = 
nn.ReplicationPad2d(padding) + elif pad_type == 'zero': + self.pad = nn.ZeroPad2d(padding) + else: + assert 0, "Unsupported padding type: {}".format(pad_type) + + # initialize normalization + norm_dim = output_dim + if norm == 'bn': + self.norm = nn.BatchNorm2d(norm_dim) + elif norm == 'in': + # self.norm = nn.InstanceNorm2d(norm_dim, track_running_stats=True) + self.norm = nn.InstanceNorm2d(norm_dim) + elif norm == 'ln': + self.norm = LayerNorm(norm_dim) + elif norm == 'adain': + self.norm = AdaptiveInstanceNorm2d(norm_dim) + elif norm == 'none' or norm == 'sn': + self.norm = None + else: + assert 0, "Unsupported normalization: {}".format(norm) + + # initialize activation + if activation == 'relu': + self.activation = nn.ReLU(inplace=True) + elif activation == 'lrelu': + self.activation = nn.LeakyReLU(0.2, inplace=True) + elif activation == 'prelu': + self.activation = nn.PReLU() + elif activation == 'selu': + self.activation = nn.SELU(inplace=True) + elif activation == 'tanh': + self.activation = nn.Tanh() + elif activation == 'none': + self.activation = None + else: + assert 0, "Unsupported activation: {}".format(activation) + + # initialize convolution + if norm == 'sn': + self.conv = SpectralNorm(nn.Conv2d(input_dim, output_dim, kernel_size, stride, bias=self.use_bias)) + else: + self.conv = nn.Conv2d(input_dim, output_dim, kernel_size, stride, bias=self.use_bias) + + def forward(self, x): + x = self.conv(self.pad(x)) + if self.norm: + x = self.norm(x) + if self.activation: + x = self.activation(x) + return x + + +class LinearBlock(nn.Module): + def __init__(self, input_dim, output_dim, norm='none', activation='relu'): + super(LinearBlock, self).__init__() + use_bias = True + # initialize fully connected layer + if norm == 'sn': + self.fc = SpectralNorm(nn.Linear(input_dim, output_dim, bias=use_bias)) + else: + self.fc = nn.Linear(input_dim, output_dim, bias=use_bias) + + # initialize normalization + norm_dim = output_dim + if norm == 'bn': + self.norm = nn.BatchNorm1d(norm_dim) + elif norm == 'in': + self.norm = nn.InstanceNorm1d(norm_dim) + elif norm == 'ln': + self.norm = LayerNorm(norm_dim) + elif norm == 'none' or norm == 'sn': + self.norm = None + else: + assert 0, "Unsupported normalization: {}".format(norm) + + # initialize activation + if activation == 'relu': + self.activation = nn.ReLU(inplace=True) + elif activation == 'lrelu': + self.activation = nn.LeakyReLU(0.2, inplace=True) + elif activation == 'prelu': + self.activation = nn.PReLU() + elif activation == 'selu': + self.activation = nn.SELU(inplace=True) + elif activation == 'tanh': + self.activation = nn.Tanh() + elif activation == 'none': + self.activation = None + else: + assert 0, "Unsupported activation: {}".format(activation) + + def forward(self, x): + out = self.fc(x) + if self.norm: + out = self.norm(out) + if self.activation: + out = self.activation(out) + return out + + +# Define a resnet block +class ResnetBlock(nn.Module): + def __init__(self, dim, norm_type, padding_type, use_dropout=False): + super(ResnetBlock, self).__init__() + self.conv_block = self.build_conv_block(dim, norm_type, padding_type, use_dropout) + + def build_conv_block(self, dim, norm_type, padding_type, use_dropout): + conv_block = [] + conv_block += [ConvBlock(dim, dim, 3, 1, 1, norm=norm_type, activation='relu', pad_type=padding_type)] + conv_block += [ConvBlock(dim, dim, 3, 1, 1, norm=norm_type, activation='none', pad_type=padding_type)] + + return nn.Sequential(*conv_block) + + def forward(self, x): + out = x + self.conv_block(x) + 
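+        # Residual branch is conv -> norm -> ReLU -> conv -> norm; identity on
+        # the skip path, and no activation is applied after the sum.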
return out + + +class SFTLayer(nn.Module): + def __init__(self): + super(SFTLayer, self).__init__() + self.SFT_scale_conv1 = nn.Conv2d(64, 64, 1) + self.SFT_scale_conv2 = nn.Conv2d(64, 64, 1) + self.SFT_shift_conv1 = nn.Conv2d(64, 64, 1) + self.SFT_shift_conv2 = nn.Conv2d(64, 64, 1) + + def forward(self, x): + scale = self.SFT_scale_conv2(F.leaky_relu(self.SFT_scale_conv1(x[1]), 0.1, inplace=True)) + shift = self.SFT_shift_conv2(F.leaky_relu(self.SFT_shift_conv1(x[1]), 0.1, inplace=True)) + return x[0] * scale + shift + + +class ConvBlock_SFT(nn.Module): + def __init__(self, dim, norm_type, padding_type, use_dropout=False): + super(ResnetBlock_SFT, self).__init__() + self.sft1 = SFTLayer() + self.conv1 = ConvBlock(dim, dim, 4, 2, 1, norm=norm_type, activation='none', pad_type=padding_type) + + def forward(self, x): + fea = self.sft1((x[0], x[1])) + fea = F.relu(self.conv1(fea), inplace=True) + return (x[0] + fea, x[1]) + + +class ConvBlock_SFT_last(nn.Module): + def __init__(self, dim, norm_type, padding_type, use_dropout=False): + super(ResnetBlock_SFT_last, self).__init__() + self.sft1 = SFTLayer() + self.conv1 = ConvBlock(dim, dim, 4, 2, 1, norm=norm_type, activation='none', pad_type=padding_type) + + def forward(self, x): + fea = self.sft1((x[0], x[1])) + fea = F.relu(self.conv1(fea), inplace=True) + return x[0] + fea + + +# Definition of normalization layer +class AdaptiveInstanceNorm2d(nn.Module): + def __init__(self, num_features, eps=1e-5, momentum=0.1): + super(AdaptiveInstanceNorm2d, self).__init__() + self.num_features = num_features + self.eps = eps + self.momentum = momentum + # weight and bias are dynamically assigned + self.weight = None + self.bias = None + # just dummy buffers, not used + self.register_buffer('running_mean', torch.zeros(num_features)) + self.register_buffer('running_var', torch.ones(num_features)) + + def forward(self, x): + assert self.weight is not None and self.bias is not None, "Please assign weight and bias before calling AdaIN!" + b, c = x.size(0), x.size(1) + running_mean = self.running_mean.repeat(b) + running_var = self.running_var.repeat(b) + + # Apply instance norm + x_reshaped = x.contiguous().view(1, b * c, *x.size()[2:]) + + out = F.batch_norm( + x_reshaped, running_mean, running_var, self.weight, self.bias, + True, self.momentum, self.eps) + + return out.view(b, c, *x.size()[2:]) + + def __repr__(self): + return self.__class__.__name__ + '(' + str(self.num_features) + ')' + + +class LayerNorm(nn.Module): + def __init__(self, num_features, eps=1e-5, affine=True): + super(LayerNorm, self).__init__() + self.num_features = num_features + self.affine = affine + self.eps = eps + + if self.affine: + self.gamma = nn.Parameter(torch.Tensor(num_features).uniform_()) + self.beta = nn.Parameter(torch.zeros(num_features)) + + def forward(self, x): + shape = [-1] + [1] * (x.dim() - 1) + # print(x.size()) + if x.size(0) == 1: + # These two lines run much faster in pytorch 0.4 than the two lines listed below. 
+ mean = x.view(-1).mean().view(*shape) + std = x.view(-1).std().view(*shape) + else: + mean = x.view(x.size(0), -1).mean(1).view(*shape) + std = x.view(x.size(0), -1).std(1).view(*shape) + + x = (x - mean) / (std + self.eps) + + if self.affine: + shape = [1, -1] + [1] * (x.dim() - 2) + x = x * self.gamma.view(*shape) + self.beta.view(*shape) + return x + + +def l2normalize(v, eps=1e-12): + return v / (v.norm() + eps) + + +class SpectralNorm(nn.Module): + """ + Based on the paper "Spectral Normalization for Generative Adversarial Networks" by Takeru Miyato, Toshiki Kataoka, Masanori Koyama, Yuichi Yoshida + and the Pytorch implementation https://github.com/christiancosgrove/pytorch-spectral-normalization-gan + """ + + def __init__(self, module, name='weight', power_iterations=1): + super(SpectralNorm, self).__init__() + self.module = module + self.name = name + self.power_iterations = power_iterations + if not self._made_params(): + self._make_params() + + def _update_u_v(self): + u = getattr(self.module, self.name + "_u") + v = getattr(self.module, self.name + "_v") + w = getattr(self.module, self.name + "_bar") + + height = w.data.shape[0] + for _ in range(self.power_iterations): + v.data = l2normalize(torch.mv(torch.t(w.view(height, -1).data), u.data)) + u.data = l2normalize(torch.mv(w.view(height, -1).data, v.data)) + + # sigma = torch.dot(u.data, torch.mv(w.view(height,-1).data, v.data)) + sigma = u.dot(w.view(height, -1).mv(v)) + setattr(self.module, self.name, w / sigma.expand_as(w)) + + def _made_params(self): + try: + u = getattr(self.module, self.name + "_u") + v = getattr(self.module, self.name + "_v") + w = getattr(self.module, self.name + "_bar") + return True + except AttributeError: + return False + + def _make_params(self): + w = getattr(self.module, self.name) + + height = w.data.shape[0] + width = w.view(height, -1).data.shape[1] + + u = nn.Parameter(w.data.new(height).normal_(0, 1), requires_grad=False) + v = nn.Parameter(w.data.new(width).normal_(0, 1), requires_grad=False) + u.data = l2normalize(u.data) + v.data = l2normalize(v.data) + w_bar = nn.Parameter(w.data) + + del self.module._parameters[self.name] + + self.module.register_parameter(self.name + "_u", u) + self.module.register_parameter(self.name + "_v", v) + self.module.register_parameter(self.name + "_bar", w_bar) + + def forward(self, *args): + self._update_u_v() + return self.module.forward(*args) + + +### STN TPS + +class CNN(nn.Module): + def __init__(self, num_output, input_nc=5, ngf=8, n_layers=5, norm_layer=nn.InstanceNorm2d, use_dropout=False): + super(CNN, self).__init__() + downconv = nn.Conv2d(5, ngf, kernel_size=4, stride=2, padding=1) + model = [downconv, nn.ReLU(True), norm_layer(ngf)] + for i in range(n_layers): + in_ngf = 2 ** i * ngf if 2 ** i * ngf < 1024 else 1024 + out_ngf = 2 ** (i + 1) * ngf if 2 ** i * ngf < 1024 else 1024 + downconv = nn.Conv2d(in_ngf, out_ngf, kernel_size=4, stride=2, padding=1) + model += [downconv, norm_layer(out_ngf), nn.ReLU(True)] + model += [nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), norm_layer(64), nn.ReLU(True)] + model += [nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), norm_layer(64), nn.ReLU(True)] + self.maxpool = nn.MaxPool2d(kernel_size=2, stride=2) + self.model = nn.Sequential(*model) + self.fc1 = nn.Linear(512, 128) + self.fc2 = nn.Linear(128, num_output) + + def forward(self, x): + x = self.model(x) + x = self.maxpool(x) + x = x.view(x.shape[0], -1) + x = F.relu(self.fc1(x)) + x = F.dropout(x, training=self.training) + x = 
self.fc2(x) + + return x + + +class ClsNet(nn.Module): + + def __init__(self): + super(ClsNet, self).__init__() + self.cnn = CNN(10) + + def forward(self, x): + return F.log_softmax(self.cnn(x)) + + +class BoundedGridLocNet(nn.Module): + + def __init__(self, grid_height, grid_width, target_control_points): + super(BoundedGridLocNet, self).__init__() + self.cnn = CNN(grid_height * grid_width * 2) + + bias = torch.from_numpy(np.arctanh(target_control_points.numpy())) + bias = bias.view(-1) + self.cnn.fc2.bias.data.copy_(bias) + self.cnn.fc2.weight.data.zero_() + + def forward(self, x): + batch_size = x.size(0) + points = F.tanh(self.cnn(x)) + coor=points.view(batch_size, -1, 2) + # coor+=torch.randn(coor.shape).cuda()/10 + row=self.get_row(coor,5) + col=self.get_col(coor,5) + rx,ry,cx,cy=torch.tensor(0.08).cuda(),torch.tensor(0.08).cuda()\ + ,torch.tensor(0.08).cuda(),torch.tensor(0.08).cuda() + row_x,row_y=row[:,:,0],row[:,:,1] + col_x,col_y=col[:,:,0],col[:,:,1] + rx_loss=torch.max(rx,row_x).mean() + ry_loss=torch.max(ry,row_y).mean() + cx_loss=torch.max(cx,col_x).mean() + cy_loss=torch.max(cy,col_y).mean() + + + return coor,rx_loss,ry_loss,cx_loss,cy_loss + + def get_row(self,coor,num): + sec_dic=[] + for j in range(num): + sum=0 + buffer=0 + flag=False + max=-1 + for i in range(num-1): + differ=(coor[:,j*num+i+1,:]-coor[:,j*num+i,:])**2 + if not flag: + second_dif=0 + flag=True + else: + second_dif=torch.abs(differ-buffer) + sec_dic.append(second_dif) + + buffer=differ + sum+=second_dif + return torch.stack(sec_dic,dim=1) + + def get_col(self,coor,num): + sec_dic=[] + for i in range(num): + sum = 0 + buffer = 0 + flag = False + max = -1 + for j in range(num - 1): + differ = (coor[:, (j+1) * num + i , :] - coor[:, j * num + i, :]) ** 2 + if not flag: + second_dif = 0 + flag = True + else: + second_dif = torch.abs(differ-buffer) + sec_dic.append(second_dif) + buffer = differ + sum += second_dif + return torch.stack(sec_dic,dim=1) + +class UnBoundedGridLocNet(nn.Module): + + def __init__(self, grid_height, grid_width, target_control_points): + super(UnBoundedGridLocNet, self).__init__() + self.cnn = CNN(grid_height * grid_width * 2) + + bias = target_control_points.view(-1) + self.cnn.fc2.bias.data.copy_(bias) + self.cnn.fc2.weight.data.zero_() + + def forward(self, x): + batch_size = x.size(0) + points = self.cnn(x) + return points.view(batch_size, -1, 2) + + +class STNNet(nn.Module): + + def __init__(self): + super(STNNet, self).__init__() + range = 0.9 + r1 = range + r2 = range + grid_size_h = 5 + grid_size_w = 5 + + assert r1 < 1 and r2 < 1 # if >= 1, arctanh will cause error in BoundedGridLocNet + target_control_points = torch.Tensor(list(itertools.product( + np.arange(-r1, r1 + 0.00001, 2.0 * r1 / (grid_size_h - 1)), + np.arange(-r2, r2 + 0.00001, 2.0 * r2 / (grid_size_w - 1)), + ))) + Y, X = target_control_points.split(1, dim=1) + target_control_points = torch.cat([X, Y], dim=1) + self.target_control_points=target_control_points + # self.get_row(target_control_points,5) + GridLocNet = { + 'unbounded_stn': UnBoundedGridLocNet, + 'bounded_stn': BoundedGridLocNet, + }['bounded_stn'] + self.loc_net = GridLocNet(grid_size_h, grid_size_w, target_control_points) + + self.tps = TPSGridGen(256, 192, target_control_points) + + def get_row(self, coor, num): + for j in range(num): + sum = 0 + buffer = 0 + flag = False + max = -1 + for i in range(num - 1): + differ = (coor[j * num + i + 1, :] - coor[j * num + i, :]) ** 2 + if not flag: + second_dif = 0 + flag = True + else: + second_dif = 
torch.abs(differ - buffer) + + buffer = differ + sum += second_dif + print(sum / num) + def get_col(self,coor,num): + for i in range(num): + sum = 0 + buffer = 0 + flag = False + max = -1 + for j in range(num - 1): + differ = (coor[ (j + 1) * num + i, :] - coor[j * num + i, :]) ** 2 + if not flag: + second_dif = 0 + flag = True + else: + second_dif = torch.abs(differ-buffer) + + buffer = differ + sum += second_dif + print(sum) + def forward(self, x, reference, mask,grid_pic): + batch_size = x.size(0) + source_control_points,rx,ry,cx,cy = self.loc_net(reference) + source_control_points=(source_control_points) + # print('control points',source_control_points.shape) + source_coordinate = self.tps(source_control_points) + grid = source_coordinate.view(batch_size, 256, 192, 2) + # print('grid size',grid.shape) + transformed_x = grid_sample(x, grid, canvas=0) + warped_mask = grid_sample(mask, grid, canvas=0) + warped_gpic= grid_sample(grid_pic, grid, canvas=0) + return transformed_x, warped_mask,rx,ry,cx,cy,warped_gpic \ No newline at end of file diff --git a/models/networks_backup.py b/models/networks_backup.py new file mode 100644 index 0000000000000000000000000000000000000000..ad5182af00888273f06924229321d0a9c9ea87f8 --- /dev/null +++ b/models/networks_backup.py @@ -0,0 +1,1730 @@ +# Copyright (C) 2017 NVIDIA Corporation. All rights reserved. +# Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode). +from torchvision import models +import torch +import os +import torch.nn as nn +import functools +from torch.autograd import Variable +import numpy as np +import torch.nn.functional as F +import math +import torch +import itertools +import numpy as np +import torch.nn as nn +import torch.nn.functional as F +from grid_sample import grid_sample +from torch.autograd import Variable +from tps_grid_gen import TPSGridGen +import ipdb + +############################################################################### +# Functions +############################################################################### + + +def weights_init(m): + classname = m.__class__.__name__ + if classname.find('Conv2d') != -1: + m.weight.data.normal_(0.0, 0.02) + elif classname.find('BatchNorm2d') != -1: + m.weight.data.normal_(1.0, 0.02) + m.bias.data.fill_(0) + + +def get_norm_layer(norm_type='instance'): + if norm_type == 'batch': + norm_layer = functools.partial(nn.BatchNorm2d, affine=True) + elif norm_type == 'instance': + norm_layer = functools.partial(nn.InstanceNorm2d, affine=False) + else: + raise NotImplementedError( + 'normalization layer [%s] is not found' % norm_type) + return norm_layer + + +def define_G(input_nc, output_nc, ngf, netG, L=1, S=1, n_downsample_global=3, n_blocks_global=9, n_local_enhancers=1, + n_blocks_local=3, norm='instance', gpu_ids=[]): + norm_layer = get_norm_layer(norm_type=norm) + if netG == 'global': + netG = GlobalGenerator(input_nc, output_nc, L, S, ngf, + n_downsample_global, n_blocks_global, norm_layer) + elif netG == 'local': + netG = LocalEnhancer(input_nc, output_nc, ngf, n_downsample_global, n_blocks_global, + n_local_enhancers, n_blocks_local, norm_layer) + else: + raise ('generator not implemented!') + print(netG) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + + +def define_Unet(input_nc, gpu_ids=[]): + netG = Unet(input_nc) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + + +def define_UnetMask(input_nc, gpu_ids=[]): + netG = 
UnetMask(input_nc, output_nc=4) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + + +def define_Refine(input_nc, output_nc, gpu_ids=[]): + netG = Refine(input_nc, output_nc) + netG.cuda(gpu_ids[0]) + netG.apply(weights_init) + return netG + + +def define_D(input_nc, ndf, n_layers_D, norm='instance', use_sigmoid=False, num_D=1, getIntermFeat=False, gpu_ids=[]): + norm_layer = get_norm_layer(norm_type=norm) + netD = MultiscaleDiscriminator( + input_nc, ndf, n_layers_D, norm_layer, use_sigmoid, num_D, getIntermFeat) + print(netD) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netD.cuda(gpu_ids[0]) + netD.apply(weights_init) + return netD + + +def define_VAE(input_nc, gpu_ids=[]): + netVAE = VAE(19, 32, 32, 1024) + print(netVAE) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netVAE.cuda(gpu_ids[0]) + return netVAE + + +def define_B(input_nc, output_nc, ngf, n_downsample_global=3, n_blocks_global=3, norm='instance', gpu_ids=[]): + norm_layer = get_norm_layer(norm_type=norm) + netB = BlendGenerator(input_nc, output_nc, ngf, + n_downsample_global, n_blocks_global, norm_layer) + print(netB) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + netB.cuda(gpu_ids[0]) + netB.apply(weights_init) + return netB + + +def define_partial_enc(input_nc, gpu_ids=[]): + net = PartialConvEncoder(input_nc) + print(net) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + net.cuda(gpu_ids[0]) + net.apply(weights_init) + return net + + +def define_conv_enc(input_nc, gpu_ids=[]): + net = ConvEncoder(input_nc) + print(net) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + net.cuda(gpu_ids[0]) + net.apply(weights_init) + return net + + +def define_AttG(output_nc, gpu_ids=[]): + net = AttGenerator(output_nc) + print(net) + if len(gpu_ids) > 0: + assert (torch.cuda.is_available()) + net.cuda(gpu_ids[0]) + net.apply(weights_init) + return net + + +def print_network(net): + if isinstance(net, list): + net = net[0] + num_params = 0 + for param in net.parameters(): + num_params += param.numel() + print(net) + print('Total number of parameters: %d' % num_params) + + +############################################################################## +# Losses +############################################################################## +class GANLoss(nn.Module): + def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0, + tensor=torch.FloatTensor): + super(GANLoss, self).__init__() + self.real_label = target_real_label + self.fake_label = target_fake_label + self.real_label_var = None + self.fake_label_var = None + self.Tensor = tensor + if use_lsgan: + self.loss = nn.MSELoss() + else: + self.loss = nn.BCELoss() + + def get_target_tensor(self, input, target_is_real): + target_tensor = None + if target_is_real: + create_label = ((self.real_label_var is None) or + (self.real_label_var.numel() != input.numel())) + if create_label: + real_tensor = self.Tensor(input.size()).fill_(self.real_label) + self.real_label_var = Variable( + real_tensor, requires_grad=False) + target_tensor = self.real_label_var + else: + create_label = ((self.fake_label_var is None) or + (self.fake_label_var.numel() != input.numel())) + if create_label: + fake_tensor = self.Tensor(input.size()).fill_(self.fake_label) + self.fake_label_var = Variable( + fake_tensor, requires_grad=False) + target_tensor = self.fake_label_var + return target_tensor + + def __call__(self, input, target_is_real): + if isinstance(input[0], list): + loss = 0 + for input_i in input: 
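+                # Multiscale case: `input` is a list with one entry per
+                # discriminator scale; only the last tensor of each entry (the
+                # patch prediction) is compared against the real/fake target.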
+ pred = input_i[-1] + target_tensor = self.get_target_tensor(pred, target_is_real) + loss += self.loss(pred, target_tensor) + return loss + else: + target_tensor = self.get_target_tensor(input[-1], target_is_real) + return self.loss(input[-1], target_tensor) + + +class VGGLossWarp(nn.Module): + def __init__(self, gpu_ids): + super(VGGLossWarp, self).__init__() + self.vgg = Vgg19().cuda() + self.criterion = nn.L1Loss() + self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + + def forward(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + loss += self.weights[4] * self.criterion(x_vgg[4], y_vgg[4].detach()) + return loss + + +class VGGLoss(nn.Module): + def __init__(self, gpu_ids): + super(VGGLoss, self).__init__() + self.vgg = Vgg19().cuda() + self.criterion = nn.L1Loss() + self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + + def forward(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + for i in range(len(x_vgg)): + loss += self.weights[i] * \ + self.criterion(x_vgg[i], y_vgg[i].detach()) + return loss + + def warp(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + loss += self.weights[4] * self.criterion(x_vgg[4], y_vgg[4].detach()) + return loss + + +class StyleLoss(nn.Module): + def __init__(self, gpu_ids): + super(StyleLoss, self).__init__() + self.vgg = Vgg19().cuda() + self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + + def forward(self, x, y): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + for i in range(len(x_vgg)): + N, C, H, W = x_vgg[i].shape + for n in range(N): + phi_x = x_vgg[i][n] + phi_y = y_vgg[i][n] + phi_x = phi_x.reshape(C, H * W) + phi_y = phi_y.reshape(C, H * W) + G_x = torch.matmul(phi_x, phi_x.t()) / (C * H * W) + G_y = torch.matmul(phi_y, phi_y.t()) / (C * H * W) + loss += torch.sqrt(torch.mean((G_x - G_y) ** 2) + ) * self.weights[i] + return loss + + +############################################################################## +# Generator +############################################################################## + +class PartialConvEncoder(nn.Module): + def __init__(self, input_nc, ngf=32, norm_layer=nn.BatchNorm2d): + super(PartialConvEncoder, self).__init__() + activation = nn.ReLU(True) + self.pad1 = nn.ReflectionPad2d(3) + self.partial_conv1 = PartialConv(input_nc, ngf, kernel_size=7) + self.norm_layer1 = norm_layer(ngf) + self.activation = activation + # down sample + mult = 2 ** 0 + self.down1 = PartialConv( + ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer2 = norm_layer(ngf * mult * 2) + mult = 2 ** 1 + self.down2 = PartialConv( + ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer3 = norm_layer(ngf * mult * 2) + + mult = 2 ** 2 + self.down3 = PartialConv( + ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer4 = norm_layer(ngf * mult * 2) + + mult = 2 ** 3 + self.down4 = PartialConv( + ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1) + self.norm_layer5 = norm_layer(ngf * mult * 2) + + def forward(self, input, mask): + input = self.pad1(input) + mask = self.pad1(mask) + input, mask = self.partial_conv1(input, mask) + input = self.norm_layer1(input) + input = self.activation(input) + + input, mask = self.down1(input, mask) + input = self.norm_layer2(input) + input = self.activation(input) + input, mask = self.down2(input, mask) + input = self.norm_layer3(input) + input = self.activation(input) + input, mask = self.down3(input, mask) + input = 
self.norm_layer4(input) + input = self.activation(input) + input, mask = self.down4(input, mask) + input = self.norm_layer5(input) + input = self.activation(input) + return input + + +class ConvEncoder(nn.Module): + def __init__(self, input_nc, ngf=32, n_downsampling=4, n_blocks=4, norm_layer=nn.BatchNorm2d, + padding_type='reflect'): + super(ConvEncoder, self).__init__() + activation = nn.ReLU(True) + # print("input_nc",input_nc) + model = [nn.ReflectionPad2d(3), nn.Conv2d( + input_nc, ngf, kernel_size=7, padding=0), norm_layer(ngf), activation] + # downsample + for i in range(n_downsampling): + stride = 2 + + mult = 2 ** i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=stride, padding=1), + norm_layer(ngf * mult * 2), activation] + self.model = nn.Sequential(*model) + + def forward(self, input): + return self.model(input) + + +class AttGenerator(nn.Module): + def __init__(self, output_nc, ngf=32, n_blocks=4, n_downsampling=4, padding_type='reflect'): + super(AttGenerator, self).__init__() + mult = 2 ** n_downsampling + model = [] + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult * 2, + norm_type='in', padding_type=padding_type)] + + self.model = nn.Sequential(*model) + self.upsampling = [] + self.out_channels = [] + self.AttNorm = [] + # upsampling + norm_layer = nn.BatchNorm2d + activation = nn.ReLU(True) + + for i in range(n_downsampling): + mult = 2 ** (n_downsampling - i) + up_module = [nn.ConvTranspose2d(ngf * mult * 2, int(ngf * mult / 2) * 2, kernel_size=3, stride=2, padding=1, + output_padding=1), + norm_layer(int(ngf * mult / 2) * 2), activation + ] + up_module = nn.Sequential(*up_module) + self.upsampling += [up_module] + self.out_channels += [int(ngf * mult / 2) * 2] + self.upsampling = nn.Sequential(*self.upsampling) + + # + self.AttNorm += [AttentionNorm(5, self.out_channels[0], 2, 4)] + self.AttNorm += [AttentionNorm(5, self.out_channels[1], 2, 2)] + self.AttNorm += [AttentionNorm(5, self.out_channels[2], 1, 2)] + self.AttNorm += [AttentionNorm(5, self.out_channels[3], 1, 1)] + self.AttNorm = nn.Sequential(*self.AttNorm) + self.last_conv = [nn.ReflectionPad2d(3), nn.Conv2d( + ngf * 2, output_nc, kernel_size=7, padding=0), nn.Tanh()] + self.last_conv = nn.Sequential(*self.last_conv) + + def forward(self, input, unattended): + up = self.model(unattended) + for i in range(4): + # print(i) + up = self.upsampling[i](up) + if i == 3: + break + up = self.AttNorm[i](input, up) + return self.last_conv(up) + + +class PartialConv(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride=1, + padding=0, dilation=1, groups=1, bias=True): + super(PartialConv, self).__init__() + self.input_conv = nn.Conv2d(in_channels, out_channels, kernel_size, + stride, padding, dilation, groups, bias) + self.mask_conv = nn.Conv2d(in_channels, out_channels, kernel_size, + stride, padding, dilation, groups, False) + self.input_conv.apply(weights_init) + + torch.nn.init.constant_(self.mask_conv.weight, 1.0) + + # mask is not updated + for param in self.mask_conv.parameters(): + param.requires_grad = False + + def forward(self, input, mask): + # http://masc.cs.gmu.edu/wiki/partialconv + # C(X) = W^T * X + b, C(0) = b, D(M) = 1 * M + 0 = sum(M) + # W^T* (M .* X) / sum(M) + b = [C(M .* X) – C(0)] / D(M) + C(0) + + output = self.input_conv(input * mask) + if self.input_conv.bias is not None: + output_bias = self.input_conv.bias.view(1, -1, 1, 1).expand_as( + output) + else: + output_bias = torch.zeros_like(output) + + with torch.no_grad(): + 
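+            # mask_conv has frozen all-ones weights, so it just counts the
+            # valid (mask == 1) pixels under each kernel window; the bias-free
+            # part of `output` is renormalized by that count below, positions
+            # with a zero count (holes) are zeroed, and the mask is updated to
+            # mark them.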
output_mask = self.mask_conv(mask) + + no_update_holes = output_mask == 0 + mask_sum = output_mask.masked_fill_(no_update_holes, 1.0) + + output_pre = (output - output_bias) / mask_sum + output_bias + output = output_pre.masked_fill_(no_update_holes, 0.0) + + new_mask = torch.ones_like(output) + new_mask = new_mask.masked_fill_(no_update_holes, 0.0) + + return output, new_mask + + +class AttentionNorm(nn.Module): + def __init__(self, ref_channels, out_channels, first_rate, second_rate): + super(AttentionNorm, self).__init__() + self.first = first_rate + self.second = second_rate + mid_channels = int(out_channels / 2) + self.conv_1time_f = nn.Conv2d( + ref_channels, mid_channels, kernel_size=3, stride=1, padding=1) + self.conv_2times_f = nn.Conv2d( + ref_channels, mid_channels, kernel_size=3, stride=2, padding=1) + self.conv_4times_f = nn.Conv2d( + ref_channels, mid_channels, kernel_size=3, stride=4, padding=1) + + self.conv_1time_s = nn.Conv2d( + mid_channels, out_channels, kernel_size=3, stride=1, padding=1) + self.conv_2times_s = nn.Conv2d( + mid_channels, out_channels, kernel_size=3, stride=2, padding=1) + self.conv_4times_s = nn.Conv2d( + mid_channels, out_channels, kernel_size=3, stride=4, padding=1) + + self.conv_1time_m = nn.Conv2d( + mid_channels, out_channels, kernel_size=3, stride=1, padding=1) + self.conv_2times_m = nn.Conv2d( + mid_channels, out_channels, kernel_size=3, stride=2, padding=1) + self.conv_4times_m = nn.Conv2d( + mid_channels, out_channels, kernel_size=3, stride=4, padding=1) + self.norm = nn.BatchNorm2d(out_channels) + self.conv = nn.Conv2d(out_channels, out_channels, + kernel_size=3, stride=1, padding=1) + + def forward(self, input, unattended): + # attention weights + # print(input.shape,unattended.shape) + if self.first == 1: + input = self.conv_1time_f(input) + elif self.first == 2: + input = self.conv_2times_f(input) + elif self.first == 4: + input = self.conv_4times_f(input) + mask = None + if self.second == 1: + bias = self.conv_1time_s(input) + mask = self.conv_1time_m(input) + elif self.second == 2: + bias = self.conv_2times_s(input) + mask = self.conv_2times_m(input) + elif self.second == 4: + bias = self.conv_4times_s(input) + mask = self.conv_4times_m(input) + mask = torch.sigmoid(mask) + attended = self.norm(unattended) + # print(attended.shape,mask.shape,bias.shape) + attended = attended * mask + bias + attended = torch.relu(attended) + attended = self.conv(attended) + output = attended + unattended + return output + + +class UnetMask(nn.Module): + def __init__(self, input_nc, output_nc=3): + super(UnetMask, self).__init__() + self.stn = STNNet() + nl = nn.InstanceNorm2d + self.conv1 = nn.Sequential(*[nn.Conv2d(input_nc, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU()]) + self.pool1 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv2 = nn.Sequential(*[nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + self.pool2 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv3 = nn.Sequential(*[nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + self.pool3 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv4 = nn.Sequential(*[nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), 
nn.ReLU()]) + self.drop4 = nn.Dropout(0.5) + self.pool4 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv5 = nn.Sequential(*[nn.Conv2d(512, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU(), + nn.Conv2d(1024, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU()]) + self.drop5 = nn.Dropout(0.5) + + self.up6 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), + nn.ReLU()]) + + self.conv6 = nn.Sequential(*[nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.up7 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), + nn.ReLU()]) + self.conv7 = nn.Sequential(*[nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + + self.up8 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), + nn.ReLU()]) + + self.conv8 = nn.Sequential(*[nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + + self.up9 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), + nn.ReLU()]) + + self.conv9 = nn.Sequential(*[nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl( + 64), nn.ReLU(), + nn.Conv2d( + 64, output_nc, kernel_size=3, stride=1, padding=1) + ]) + + def forward(self, input, refer, mask): + input, warped_mask, rx, ry, cx, cy = self.stn( + input, torch.cat([mask, refer, input], 1), mask) + # ipdb.set_trace()# print(input.shape) + + conv1 = self.conv1(torch.cat([refer.detach(), input.detach()], 1)) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9, input, warped_mask, rx, ry, cx, cy + + +class Unet(nn.Module): + def __init__(self, input_nc, output_nc=3): + super(Unet, self).__init__() + self.stn = STNNet() + nl = nn.InstanceNorm2d + self.conv1 = nn.Sequential(*[nn.Conv2d(input_nc, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU()]) + self.pool1 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv2 = nn.Sequential(*[nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + self.pool2 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv3 = nn.Sequential(*[nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + self.pool3 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv4 = 
nn.Sequential(*[nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.drop4 = nn.Dropout(0.5) + self.pool4 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv5 = nn.Sequential(*[nn.Conv2d(512, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU(), + nn.Conv2d(1024, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU()]) + self.drop5 = nn.Dropout(0.5) + + self.up6 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), + nn.ReLU()]) + + self.conv6 = nn.Sequential(*[nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.up7 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), + nn.ReLU()]) + self.conv7 = nn.Sequential(*[nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + + self.up8 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), + nn.ReLU()]) + + self.conv8 = nn.Sequential(*[nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + + self.up9 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), + nn.ReLU()]) + + self.conv9 = nn.Sequential(*[nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl( + 64), nn.ReLU(), + nn.Conv2d( + 64, output_nc, kernel_size=3, stride=1, padding=1) + ]) + + def forward(self, input, refer, mask): + input, warped_mask, rx, ry, cx, cy = self.stn( + input, torch.cat([mask, refer, input], 1), mask) + # ipdb.set_trace() + conv1 = self.conv1(torch.cat([refer.detach(), input.detach()], 1)) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9, input, warped_mask, rx, ry, cx, cy + + def refine(self, input): + conv1 = self.conv1(input) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9 + + +class Refine(nn.Module): + def __init__(self, input_nc, output_nc=3): + super(Refine, 
self).__init__() + nl = nn.InstanceNorm2d + self.conv1 = nn.Sequential(*[nn.Conv2d(input_nc, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU()]) + self.pool1 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv2 = nn.Sequential(*[nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + self.pool2 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv3 = nn.Sequential(*[nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + self.pool3 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv4 = nn.Sequential(*[nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.drop4 = nn.Dropout(0.5) + self.pool4 = nn.MaxPool2d(kernel_size=(2, 2)) + + self.conv5 = nn.Sequential(*[nn.Conv2d(512, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU(), + nn.Conv2d(1024, 1024, kernel_size=3, stride=1, padding=1), nl(1024), nn.ReLU()]) + self.drop5 = nn.Dropout(0.5) + + self.up6 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), + nn.ReLU()]) + + self.conv6 = nn.Sequential(*[nn.Conv2d(1024, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU(), + nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nl(512), nn.ReLU()]) + self.up7 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), + nn.ReLU()]) + self.conv7 = nn.Sequential(*[nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU(), + nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nl(256), nn.ReLU()]) + + self.up8 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), + nn.ReLU()]) + + self.conv8 = nn.Sequential(*[nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU(), + nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1), nl(128), nn.ReLU()]) + + self.up9 = nn.Sequential( + *[nn.UpsamplingNearest2d(scale_factor=2), nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), + nn.ReLU()]) + + self.conv9 = nn.Sequential(*[nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1), nl(64), nn.ReLU(), + nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1), nl( + 64), nn.ReLU(), + nn.Conv2d( + 64, output_nc, kernel_size=3, stride=1, padding=1) + ]) + + def refine(self, input): + conv1 = self.conv1(input) + pool1 = self.pool1(conv1) + + conv2 = self.conv2(pool1) + pool2 = self.pool2(conv2) + + conv3 = self.conv3(pool2) + pool3 = self.pool3(conv3) + + conv4 = self.conv4(pool3) + drop4 = self.drop4(conv4) + pool4 = self.pool4(drop4) + + conv5 = self.conv5(pool4) + drop5 = self.drop5(conv5) + + up6 = self.up6(drop5) + conv6 = self.conv6(torch.cat([drop4, up6], 1)) + + up7 = self.up7(conv6) + conv7 = self.conv7(torch.cat([conv3, up7], 1)) + + up8 = self.up8(conv7) + conv8 = self.conv8(torch.cat([conv2, up8], 1)) + + up9 = self.up9(conv8) + conv9 = self.conv9(torch.cat([conv1, up9], 1)) + return conv9 + + +class GlobalGenerator(nn.Module): + def __init__(self, input_nc, output_nc, L, S, ngf=64, n_downsampling=3, n_blocks=9, norm_layer=nn.BatchNorm2d, + padding_type='reflect'): + assert (n_blocks >= 0) + 
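+        # L and S are the input channel counts of the label encoder and style encoder built below.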
super(GlobalGenerator, self).__init__() + activation = nn.ReLU(True) + + model = [nn.ReflectionPad2d(3), nn.Conv2d( + input_nc, ngf, kernel_size=7, padding=0), norm_layer(ngf), activation] + # downsample + for i in range(n_downsampling): + mult = 2 ** i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1), + norm_layer(ngf * mult * 2), activation] + + # resnet blocks + mult = 2 ** n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, norm_type='adain', + padding_type=padding_type)] + # upsample + for i in range(n_downsampling): + mult = 2 ** (n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=2, padding=1, + output_padding=1), + norm_layer(int(ngf * mult / 2)), activation] + model += [nn.ReflectionPad2d(3), nn.Conv2d(ngf, + output_nc, kernel_size=7, padding=0)] + self.model = nn.Sequential(*model) + + # style encoder + self.enc_style = StyleEncoder(5, S, 16, self.get_num_adain_params(self.model), norm='none', activ='relu', + pad_type='reflect') + # label encoder + self.enc_label = LabelEncoder( + 5, L, 16, 64, norm='none', activ='relu', pad_type='reflect') + + def assign_adain_params(self, adain_params, model): + # assign the adain_params to the AdaIN layers in model + for m in model.modules(): + if m.__class__.__name__ == "AdaptiveInstanceNorm2d": + mean = adain_params[:, :m.num_features] + std = adain_params[:, m.num_features:2 * m.num_features] + m.bias = mean.contiguous().view(-1) + m.weight = std.contiguous().view(-1) + if adain_params.size(1) > 2 * m.num_features: + adain_params = adain_params[:, 2 * m.num_features:] + + def get_num_adain_params(self, model): + # return the number of AdaIN parameters needed by the model + num_adain_params = 0 + for m in model.modules(): + if m.__class__.__name__ == "AdaptiveInstanceNorm2d": + num_adain_params += 2 * m.num_features + return num_adain_params + + def forward(self, input, input_ref, image_ref): + fea1, fea2 = self.enc_label(input_ref) + adain_params = self.enc_style((image_ref, fea1, fea2)) + self.assign_adain_params(adain_params, self.model) + return self.model(input) + + +class BlendGenerator(nn.Module): + def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=3, n_blocks=3, norm_layer=nn.BatchNorm2d, + padding_type='reflect'): + assert (n_blocks >= 0) + super(BlendGenerator, self).__init__() + activation = nn.ReLU(True) + + model = [nn.ReflectionPad2d(3), nn.Conv2d( + input_nc, ngf, kernel_size=7, padding=0), norm_layer(ngf), activation] + # downsample + for i in range(n_downsampling): + mult = 2 ** i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1), + norm_layer(ngf * mult * 2), activation] + + # resnet blocks + mult = 2 ** n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, norm_type='in', + padding_type=padding_type)] + + # upsample + for i in range(n_downsampling): + mult = 2 ** (n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=2, padding=1, + output_padding=1), + norm_layer(int(ngf * mult / 2)), activation] + model += [nn.ReflectionPad2d(3), nn.Conv2d(ngf, + output_nc, kernel_size=7, padding=0), nn.Sigmoid()] + self.model = nn.Sequential(*model) + + def forward(self, input1, input2): + m = self.model(torch.cat([input1, input2], 1)) + return input1 * m + input2 * (1 - m), m + + # Define the Multiscale Discriminator. 
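+# A separate PatchGAN (NLayerDiscriminator) is built for each of num_D scales; the input is
+# average-pooled between scales so coarser discriminators see a lower-resolution copy.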
+ + +class MultiscaleDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, + use_sigmoid=False, num_D=3, getIntermFeat=False): + super(MultiscaleDiscriminator, self).__init__() + self.num_D = num_D + self.n_layers = n_layers + self.getIntermFeat = getIntermFeat + + for i in range(num_D): + netD = NLayerDiscriminator( + input_nc, ndf, n_layers, norm_layer, use_sigmoid, getIntermFeat) + if getIntermFeat: + for j in range(n_layers + 2): + setattr(self, 'scale' + str(i) + '_layer' + + str(j), getattr(netD, 'model' + str(j))) + else: + setattr(self, 'layer' + str(i), netD.model) + + self.downsample = nn.AvgPool2d( + 3, stride=2, padding=[1, 1], count_include_pad=False) + + def singleD_forward(self, model, input): + if self.getIntermFeat: + result = [input] + for i in range(len(model)): + result.append(model[i](result[-1])) + return result[1:] + else: + return [model(input)] + + def forward(self, input): + num_D = self.num_D + result = [] + input_downsampled = input + for i in range(num_D): + if self.getIntermFeat: + model = [getattr(self, 'scale' + str(num_D - 1 - i) + '_layer' + str(j)) for j in + range(self.n_layers + 2)] + else: + model = getattr(self, 'layer' + str(num_D - 1 - i)) + result.append(self.singleD_forward(model, input_downsampled)) + if i != (num_D - 1): + input_downsampled = self.downsample(input_downsampled) + return result + + +# Define the PatchGAN discriminator with the specified arguments. +class NLayerDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, use_sigmoid=False, getIntermFeat=False): + super(NLayerDiscriminator, self).__init__() + self.getIntermFeat = getIntermFeat + self.n_layers = n_layers + + kw = 4 + padw = int(np.ceil((kw - 1.0) / 2)) + sequence = [[nn.Conv2d(input_nc, ndf, kernel_size=kw, + stride=2, padding=padw), nn.LeakyReLU(0.2, True)]] + + nf = ndf + for n in range(1, n_layers): + nf_prev = nf + nf = min(nf * 2, 512) + sequence += [[ + nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=2, padding=padw), + norm_layer(nf), nn.LeakyReLU(0.2, True) + ]] + + nf_prev = nf + nf = min(nf * 2, 512) + sequence += [[ + nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=1, padding=padw), + norm_layer(nf), + nn.LeakyReLU(0.2, True) + ]] + + sequence += [[nn.Conv2d(nf, 1, kernel_size=kw, + stride=1, padding=padw)]] + + if use_sigmoid: + sequence += [[nn.Sigmoid()]] + + if getIntermFeat: + for n in range(len(sequence)): + setattr(self, 'model' + str(n), nn.Sequential(*sequence[n])) + else: + sequence_stream = [] + for n in range(len(sequence)): + sequence_stream += sequence[n] + self.model = nn.Sequential(*sequence_stream) + + def forward(self, input): + if self.getIntermFeat: + res = [input] + for n in range(self.n_layers + 2): + model = getattr(self, 'model' + str(n)) + res.append(model(res[-1])) + return res[1:] + else: + return self.model(input) + + +class Vgg19(torch.nn.Module): + def __init__(self, requires_grad=False): + super(Vgg19, self).__init__() + vgg = models.vgg19(pretrained=False) + vgg.load_state_dict(torch.load(os.path.dirname( + os.path.realpath(__file__)) + "/vgg19-dcbb9e9d.pth")) + vgg_pretrained_features = vgg.features + self.vgg = vgg + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + self.slice5 = torch.nn.Sequential() + for x in range(2): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(2, 7): + 
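+        # each slice wraps a consecutive block of VGG19 layers; forward() returns the five
+        # intermediate activations used as feature taps for the perceptual (VGG) loss.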
self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(7, 12): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(12, 21): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + for x in range(21, 30): + self.slice5.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + h_relu1 = self.slice1(X) + h_relu2 = self.slice2(h_relu1) + h_relu3 = self.slice3(h_relu2) + h_relu4 = self.slice4(h_relu3) + h_relu5 = self.slice5(h_relu4) + out = [h_relu1, h_relu2, h_relu3, h_relu4, h_relu5] + return out + + def extract(self, x): + x = self.vgg.features(x) + x = self.vgg.avgpool(x) + return x + + +# Define the MaskVAE +class VAE(nn.Module): + def __init__(self, nc, ngf, ndf, latent_variable_size): + super(VAE, self).__init__() + # self.cuda = True + self.nc = nc + self.ngf = ngf + self.ndf = ndf + self.latent_variable_size = latent_variable_size + + # encoder + self.e1 = nn.Conv2d(nc, ndf, 4, 2, 1) + self.bn1 = nn.BatchNorm2d(ndf) + + self.e2 = nn.Conv2d(ndf, ndf * 2, 4, 2, 1) + self.bn2 = nn.BatchNorm2d(ndf * 2) + + self.e3 = nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1) + self.bn3 = nn.BatchNorm2d(ndf * 4) + + self.e4 = nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1) + self.bn4 = nn.BatchNorm2d(ndf * 8) + + self.e5 = nn.Conv2d(ndf * 8, ndf * 16, 4, 2, 1) + self.bn5 = nn.BatchNorm2d(ndf * 16) + + self.e6 = nn.Conv2d(ndf * 16, ndf * 32, 4, 2, 1) + self.bn6 = nn.BatchNorm2d(ndf * 32) + + self.e7 = nn.Conv2d(ndf * 32, ndf * 64, 4, 2, 1) + self.bn7 = nn.BatchNorm2d(ndf * 64) + + self.fc1 = nn.Linear(ndf * 64 * 4 * 4, latent_variable_size) + self.fc2 = nn.Linear(ndf * 64 * 4 * 4, latent_variable_size) + + # decoder + self.d1 = nn.Linear(latent_variable_size, ngf * 64 * 4 * 4) + + self.up1 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd1 = nn.ReplicationPad2d(1) + self.d2 = nn.Conv2d(ngf * 64, ngf * 32, 3, 1) + self.bn8 = nn.BatchNorm2d(ngf * 32, 1.e-3) + + self.up2 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd2 = nn.ReplicationPad2d(1) + self.d3 = nn.Conv2d(ngf * 32, ngf * 16, 3, 1) + self.bn9 = nn.BatchNorm2d(ngf * 16, 1.e-3) + + self.up3 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd3 = nn.ReplicationPad2d(1) + self.d4 = nn.Conv2d(ngf * 16, ngf * 8, 3, 1) + self.bn10 = nn.BatchNorm2d(ngf * 8, 1.e-3) + + self.up4 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd4 = nn.ReplicationPad2d(1) + self.d5 = nn.Conv2d(ngf * 8, ngf * 4, 3, 1) + self.bn11 = nn.BatchNorm2d(ngf * 4, 1.e-3) + + self.up5 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd5 = nn.ReplicationPad2d(1) + self.d6 = nn.Conv2d(ngf * 4, ngf * 2, 3, 1) + self.bn12 = nn.BatchNorm2d(ngf * 2, 1.e-3) + + self.up6 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd6 = nn.ReplicationPad2d(1) + self.d7 = nn.Conv2d(ngf * 2, ngf, 3, 1) + self.bn13 = nn.BatchNorm2d(ngf, 1.e-3) + + self.up7 = nn.UpsamplingNearest2d(scale_factor=2) + self.pd7 = nn.ReplicationPad2d(1) + self.d8 = nn.Conv2d(ngf, nc, 3, 1) + + self.leakyrelu = nn.LeakyReLU(0.2) + self.relu = nn.ReLU() + # self.sigmoid = nn.Sigmoid() + self.maxpool = nn.MaxPool2d((2, 2), (2, 2)) + + def encode(self, x): + h1 = self.leakyrelu(self.bn1(self.e1(x))) + h2 = self.leakyrelu(self.bn2(self.e2(h1))) + h3 = self.leakyrelu(self.bn3(self.e3(h2))) + h4 = self.leakyrelu(self.bn4(self.e4(h3))) + h5 = self.leakyrelu(self.bn5(self.e5(h4))) + h6 = self.leakyrelu(self.bn6(self.e6(h5))) + h7 = self.leakyrelu(self.bn7(self.e7(h6))) + h7 = h7.view(-1, self.ndf * 64 * 
4 * 4) + return self.fc1(h7), self.fc2(h7) + + def reparametrize(self, mu, logvar): + std = logvar.mul(0.5).exp_() + # if self.cuda: + eps = torch.cuda.FloatTensor(std.size()).normal_() + # else: + # eps = torch.FloatTensor(std.size()).normal_() + eps = Variable(eps) + return eps.mul(std).add_(mu) + + def decode(self, z): + h1 = self.relu(self.d1(z)) + h1 = h1.view(-1, self.ngf * 64, 4, 4) + h2 = self.leakyrelu(self.bn8(self.d2(self.pd1(self.up1(h1))))) + h3 = self.leakyrelu(self.bn9(self.d3(self.pd2(self.up2(h2))))) + h4 = self.leakyrelu(self.bn10(self.d4(self.pd3(self.up3(h3))))) + h5 = self.leakyrelu(self.bn11(self.d5(self.pd4(self.up4(h4))))) + h6 = self.leakyrelu(self.bn12(self.d6(self.pd5(self.up5(h5))))) + h7 = self.leakyrelu(self.bn13(self.d7(self.pd6(self.up6(h6))))) + return self.d8(self.pd7(self.up7(h7))) + + def get_latent_var(self, x): + mu, logvar = self.encode(x) + z = self.reparametrize(mu, logvar) + return z, mu, logvar.mul(0.5).exp_() + + def forward(self, x): + mu, logvar = self.encode(x) + z = self.reparametrize(mu, logvar) + res = self.decode(z) + + return res, x, mu, logvar + + +# style encode part +class StyleEncoder(nn.Module): + def __init__(self, n_downsample, input_dim, dim, style_dim, norm, activ, pad_type): + super(StyleEncoder, self).__init__() + self.model = [] + self.model_middle = [] + self.model_last = [] + self.model += [ConvBlock(input_dim, dim, 7, 1, 3, + norm=norm, activation=activ, pad_type=pad_type)] + for i in range(2): + self.model += [ConvBlock(dim, 2 * dim, 4, 2, 1, + norm=norm, activation=activ, pad_type=pad_type)] + dim *= 2 + for i in range(n_downsample - 2): + self.model_middle += [ConvBlock(dim, dim, 4, 2, 1, + norm=norm, activation=activ, pad_type=pad_type)] + self.model_last += [nn.AdaptiveAvgPool2d(1)] # global average pooling + self.model_last += [nn.Conv2d(dim, style_dim, 1, 1, 0)] + + self.model = nn.Sequential(*self.model) + self.model_middle = nn.Sequential(*self.model_middle) + self.model_last = nn.Sequential(*self.model_last) + + self.output_dim = dim + + self.sft1 = SFTLayer() + self.sft2 = SFTLayer() + + def forward(self, x): + fea = self.model(x[0]) + fea = self.sft1((fea, x[1])) + fea = self.model_middle(fea) + fea = self.sft2((fea, x[2])) + return self.model_last(fea) + + +# label encode part +class LabelEncoder(nn.Module): + def __init__(self, n_downsample, input_dim, dim, style_dim, norm, activ, pad_type): + super(LabelEncoder, self).__init__() + self.model = [] + self.model_last = [nn.ReLU()] + self.model += [ConvBlock(input_dim, dim, 7, 1, 3, + norm=norm, activation=activ, pad_type=pad_type)] + self.model += [ConvBlock(dim, 2 * dim, 4, 2, 1, + norm=norm, activation=activ, pad_type=pad_type)] + dim *= 2 + self.model += [ConvBlock(dim, 2 * dim, 4, 2, 1, + norm=norm, activation='none', pad_type=pad_type)] + dim *= 2 + for i in range(n_downsample - 3): + self.model_last += [ConvBlock(dim, dim, 4, 2, 1, + norm=norm, activation=activ, pad_type=pad_type)] + self.model_last += [ConvBlock(dim, dim, 4, 2, 1, + norm=norm, activation='none', pad_type=pad_type)] + self.model = nn.Sequential(*self.model) + self.model_last = nn.Sequential(*self.model_last) + self.output_dim = dim + + def forward(self, x): + fea = self.model(x) + return fea, self.model_last(fea) + + +# Define the basic block +class ConvBlock(nn.Module): + def __init__(self, input_dim, output_dim, kernel_size, stride, + padding=0, norm='none', activation='relu', pad_type='zero'): + super(ConvBlock, self).__init__() + self.use_bias = True + # initialize padding + if 
pad_type == 'reflect': + self.pad = nn.ReflectionPad2d(padding) + elif pad_type == 'replicate': + self.pad = nn.ReplicationPad2d(padding) + elif pad_type == 'zero': + self.pad = nn.ZeroPad2d(padding) + else: + assert 0, "Unsupported padding type: {}".format(pad_type) + + # initialize normalization + norm_dim = output_dim + if norm == 'bn': + self.norm = nn.BatchNorm2d(norm_dim) + elif norm == 'in': + # self.norm = nn.InstanceNorm2d(norm_dim, track_running_stats=True) + self.norm = nn.InstanceNorm2d(norm_dim) + elif norm == 'ln': + self.norm = LayerNorm(norm_dim) + elif norm == 'adain': + self.norm = AdaptiveInstanceNorm2d(norm_dim) + elif norm == 'none' or norm == 'sn': + self.norm = None + else: + assert 0, "Unsupported normalization: {}".format(norm) + + # initialize activation + if activation == 'relu': + self.activation = nn.ReLU(inplace=True) + elif activation == 'lrelu': + self.activation = nn.LeakyReLU(0.2, inplace=True) + elif activation == 'prelu': + self.activation = nn.PReLU() + elif activation == 'selu': + self.activation = nn.SELU(inplace=True) + elif activation == 'tanh': + self.activation = nn.Tanh() + elif activation == 'none': + self.activation = None + else: + assert 0, "Unsupported activation: {}".format(activation) + + # initialize convolution + if norm == 'sn': + self.conv = SpectralNorm( + nn.Conv2d(input_dim, output_dim, kernel_size, stride, bias=self.use_bias)) + else: + self.conv = nn.Conv2d(input_dim, output_dim, + kernel_size, stride, bias=self.use_bias) + + def forward(self, x): + x = self.conv(self.pad(x)) + if self.norm: + x = self.norm(x) + if self.activation: + x = self.activation(x) + return x + + +class LinearBlock(nn.Module): + def __init__(self, input_dim, output_dim, norm='none', activation='relu'): + super(LinearBlock, self).__init__() + use_bias = True + # initialize fully connected layer + if norm == 'sn': + self.fc = SpectralNorm( + nn.Linear(input_dim, output_dim, bias=use_bias)) + else: + self.fc = nn.Linear(input_dim, output_dim, bias=use_bias) + + # initialize normalization + norm_dim = output_dim + if norm == 'bn': + self.norm = nn.BatchNorm1d(norm_dim) + elif norm == 'in': + self.norm = nn.InstanceNorm1d(norm_dim) + elif norm == 'ln': + self.norm = LayerNorm(norm_dim) + elif norm == 'none' or norm == 'sn': + self.norm = None + else: + assert 0, "Unsupported normalization: {}".format(norm) + + # initialize activation + if activation == 'relu': + self.activation = nn.ReLU(inplace=True) + elif activation == 'lrelu': + self.activation = nn.LeakyReLU(0.2, inplace=True) + elif activation == 'prelu': + self.activation = nn.PReLU() + elif activation == 'selu': + self.activation = nn.SELU(inplace=True) + elif activation == 'tanh': + self.activation = nn.Tanh() + elif activation == 'none': + self.activation = None + else: + assert 0, "Unsupported activation: {}".format(activation) + + def forward(self, x): + out = self.fc(x) + if self.norm: + out = self.norm(out) + if self.activation: + out = self.activation(out) + return out + + +# Define a resnet block +class ResnetBlock(nn.Module): + def __init__(self, dim, norm_type, padding_type, use_dropout=False): + super(ResnetBlock, self).__init__() + self.conv_block = self.build_conv_block( + dim, norm_type, padding_type, use_dropout) + + def build_conv_block(self, dim, norm_type, padding_type, use_dropout): + conv_block = [] + conv_block += [ConvBlock(dim, dim, 3, 1, 1, norm=norm_type, + activation='relu', pad_type=padding_type)] + conv_block += [ConvBlock(dim, dim, 3, 1, 1, norm=norm_type, + 
activation='none', pad_type=padding_type)] + + return nn.Sequential(*conv_block) + + def forward(self, x): + out = x + self.conv_block(x) + return out + + +class SFTLayer(nn.Module): + def __init__(self): + super(SFTLayer, self).__init__() + self.SFT_scale_conv1 = nn.Conv2d(64, 64, 1) + self.SFT_scale_conv2 = nn.Conv2d(64, 64, 1) + self.SFT_shift_conv1 = nn.Conv2d(64, 64, 1) + self.SFT_shift_conv2 = nn.Conv2d(64, 64, 1) + + def forward(self, x): + scale = self.SFT_scale_conv2(F.leaky_relu( + self.SFT_scale_conv1(x[1]), 0.1, inplace=True)) + shift = self.SFT_shift_conv2(F.leaky_relu( + self.SFT_shift_conv1(x[1]), 0.1, inplace=True)) + return x[0] * scale + shift + + +class ConvBlock_SFT(nn.Module): + def __init__(self, dim, norm_type, padding_type, use_dropout=False): + super(ResnetBlock_SFT, self).__init__() + self.sft1 = SFTLayer() + self.conv1 = ConvBlock( + dim, dim, 4, 2, 1, norm=norm_type, activation='none', pad_type=padding_type) + + def forward(self, x): + fea = self.sft1((x[0], x[1])) + fea = F.relu(self.conv1(fea), inplace=True) + return (x[0] + fea, x[1]) + + +class ConvBlock_SFT_last(nn.Module): + def __init__(self, dim, norm_type, padding_type, use_dropout=False): + super(ResnetBlock_SFT_last, self).__init__() + self.sft1 = SFTLayer() + self.conv1 = ConvBlock( + dim, dim, 4, 2, 1, norm=norm_type, activation='none', pad_type=padding_type) + + def forward(self, x): + fea = self.sft1((x[0], x[1])) + fea = F.relu(self.conv1(fea), inplace=True) + return x[0] + fea + + +# Definition of normalization layer +class AdaptiveInstanceNorm2d(nn.Module): + def __init__(self, num_features, eps=1e-5, momentum=0.1): + super(AdaptiveInstanceNorm2d, self).__init__() + self.num_features = num_features + self.eps = eps + self.momentum = momentum + # weight and bias are dynamically assigned + self.weight = None + self.bias = None + # just dummy buffers, not used + self.register_buffer('running_mean', torch.zeros(num_features)) + self.register_buffer('running_var', torch.ones(num_features)) + + def forward(self, x): + assert self.weight is not None and self.bias is not None, "Please assign weight and bias before calling AdaIN!" + b, c = x.size(0), x.size(1) + running_mean = self.running_mean.repeat(b) + running_var = self.running_var.repeat(b) + + # Apply instance norm + x_reshaped = x.contiguous().view(1, b * c, *x.size()[2:]) + + out = F.batch_norm( + x_reshaped, running_mean, running_var, self.weight, self.bias, + True, self.momentum, self.eps) + + return out.view(b, c, *x.size()[2:]) + + def __repr__(self): + return self.__class__.__name__ + '(' + str(self.num_features) + ')' + + +class LayerNorm(nn.Module): + def __init__(self, num_features, eps=1e-5, affine=True): + super(LayerNorm, self).__init__() + self.num_features = num_features + self.affine = affine + self.eps = eps + + if self.affine: + self.gamma = nn.Parameter(torch.Tensor(num_features).uniform_()) + self.beta = nn.Parameter(torch.zeros(num_features)) + + def forward(self, x): + shape = [-1] + [1] * (x.dim() - 1) + # print(x.size()) + if x.size(0) == 1: + # These two lines run much faster in pytorch 0.4 than the two lines listed below. 
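+            # single-sample case: compute mean/std over every element of the tensor at once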
+ mean = x.view(-1).mean().view(*shape) + std = x.view(-1).std().view(*shape) + else: + mean = x.view(x.size(0), -1).mean(1).view(*shape) + std = x.view(x.size(0), -1).std(1).view(*shape) + + x = (x - mean) / (std + self.eps) + + if self.affine: + shape = [1, -1] + [1] * (x.dim() - 2) + x = x * self.gamma.view(*shape) + self.beta.view(*shape) + return x + + +def l2normalize(v, eps=1e-12): + return v / (v.norm() + eps) + + +class SpectralNorm(nn.Module): + """ + Based on the paper "Spectral Normalization for Generative Adversarial Networks" by Takeru Miyato, Toshiki Kataoka, Masanori Koyama, Yuichi Yoshida + and the Pytorch implementation https://github.com/christiancosgrove/pytorch-spectral-normalization-gan + """ + + def __init__(self, module, name='weight', power_iterations=1): + super(SpectralNorm, self).__init__() + self.module = module + self.name = name + self.power_iterations = power_iterations + if not self._made_params(): + self._make_params() + + def _update_u_v(self): + u = getattr(self.module, self.name + "_u") + v = getattr(self.module, self.name + "_v") + w = getattr(self.module, self.name + "_bar") + + height = w.data.shape[0] + for _ in range(self.power_iterations): + v.data = l2normalize( + torch.mv(torch.t(w.view(height, -1).data), u.data)) + u.data = l2normalize(torch.mv(w.view(height, -1).data, v.data)) + + # sigma = torch.dot(u.data, torch.mv(w.view(height,-1).data, v.data)) + sigma = u.dot(w.view(height, -1).mv(v)) + setattr(self.module, self.name, w / sigma.expand_as(w)) + + def _made_params(self): + try: + u = getattr(self.module, self.name + "_u") + v = getattr(self.module, self.name + "_v") + w = getattr(self.module, self.name + "_bar") + return True + except AttributeError: + return False + + def _make_params(self): + w = getattr(self.module, self.name) + + height = w.data.shape[0] + width = w.view(height, -1).data.shape[1] + + u = nn.Parameter(w.data.new(height).normal_(0, 1), requires_grad=False) + v = nn.Parameter(w.data.new(width).normal_(0, 1), requires_grad=False) + u.data = l2normalize(u.data) + v.data = l2normalize(v.data) + w_bar = nn.Parameter(w.data) + + del self.module._parameters[self.name] + + self.module.register_parameter(self.name + "_u", u) + self.module.register_parameter(self.name + "_v", v) + self.module.register_parameter(self.name + "_bar", w_bar) + + def forward(self, *args): + self._update_u_v() + return self.module.forward(*args) + + +# STN TPS + +class CNN(nn.Module): + def __init__(self, num_output, input_nc=5, ngf=8, n_layers=5, norm_layer=nn.InstanceNorm2d, use_dropout=False): + super(CNN, self).__init__() + downconv = nn.Conv2d(5, ngf, kernel_size=4, stride=2, padding=1) + model = [downconv, nn.ReLU(True), norm_layer(ngf)] + for i in range(n_layers): + in_ngf = 2 ** i * ngf if 2 ** i * ngf < 1024 else 1024 + out_ngf = 2 ** (i + 1) * ngf if 2 ** i * ngf < 1024 else 1024 + downconv = nn.Conv2d( + in_ngf, out_ngf, kernel_size=4, stride=2, padding=1) + model += [downconv, norm_layer(out_ngf), nn.ReLU(True)] + model += [nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), + norm_layer(64), nn.ReLU(True)] + model += [nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), + norm_layer(64), nn.ReLU(True)] + self.maxpool = nn.MaxPool2d(kernel_size=2, stride=2) + self.model = nn.Sequential(*model) + self.fc1 = nn.Linear(512, 128) + self.fc2 = nn.Linear(128, num_output) + + def forward(self, x): + x = self.model(x) + x = self.maxpool(x) + x = x.view(x.shape[0], -1) + x = F.relu(self.fc1(x)) + x = F.dropout(x, training=self.training) + x 
= self.fc2(x) + + return x + + +class ClsNet(nn.Module): + + def __init__(self): + super(ClsNet, self).__init__() + self.cnn = CNN(10) + + def forward(self, x): + return F.log_softmax(self.cnn(x)) + + +class BoundedGridLocNet(nn.Module): + + def __init__(self, grid_height, grid_width, target_control_points): + super(BoundedGridLocNet, self).__init__() + self.cnn = CNN(grid_height * grid_width * 2) + + bias = torch.from_numpy(np.arctanh(target_control_points.numpy())) + bias = bias.view(-1) + self.cnn.fc2.bias.data.copy_(bias) + self.cnn.fc2.weight.data.zero_() + + def forward(self, x): + batch_size = x.size(0) + points = F.tanh(self.cnn(x)) + # ipdb.set_trace() + coor = points.view(batch_size, -1, 2) + row = self.get_row(coor, 5) + col = self.get_col(coor, 5) + rx, ry, cx, cy = torch.tensor(0.08).cuda(), torch.tensor( + 0.08).cuda(), torch.tensor(0.08).cuda(), torch.tensor(0.08).cuda() + row_x, row_y = row[:, :, 0], row[:, :, 1] + col_x, col_y = col[:, :, 0], col[:, :, 1] + rx_loss = torch.max(rx, row_x).mean() + ry_loss = torch.max(ry, row_y).mean() + cx_loss = torch.max(cx, col_x).mean() + cy_loss = torch.max(cy, col_y).mean() + + return coor, rx_loss, ry_loss, cx_loss, cy_loss + + def get_row(self, coor, num): + sec_dic = [] + for j in range(num): + sum = 0 + buffer = 0 + flag = False + max = -1 + for i in range(num-1): + differ = (coor[:, j*num+i+1, :]-coor[:, j*num+i, :])**2 + if not flag: + second_dif = 0 + flag = True + else: + second_dif = torch.abs(differ-buffer) + sec_dic.append(second_dif) + + buffer = differ + sum += second_dif + return torch.stack(sec_dic, dim=1) + + def get_col(self, coor, num): + sec_dic = [] + for i in range(num): + sum = 0 + buffer = 0 + flag = False + max = -1 + for j in range(num - 1): + differ = (coor[:, (j+1) * num + i, :] - + coor[:, j * num + i, :]) ** 2 + if not flag: + second_dif = 0 + flag = True + else: + second_dif = torch.abs(differ-buffer) + sec_dic.append(second_dif) + buffer = differ + sum += second_dif + return torch.stack(sec_dic, dim=1) + + +class UnBoundedGridLocNet(nn.Module): + + def __init__(self, grid_height, grid_width, target_control_points): + super(UnBoundedGridLocNet, self).__init__() + self.cnn = CNN(grid_height * grid_width * 2) + + bias = target_control_points.view(-1) + self.cnn.fc2.bias.data.copy_(bias) + self.cnn.fc2.weight.data.zero_() + + def forward(self, x): + batch_size = x.size(0) + points = self.cnn(x) + return points.view(batch_size, -1, 2) + + +class STNNet(nn.Module): + + def __init__(self): + super(STNNet, self).__init__() + range = 0.9 + r1 = range + r2 = range + grid_size_h = 5 + grid_size_w = 5 + + assert r1 < 1 and r2 < 1 # if >= 1, arctanh will cause error in BoundedGridLocNet + target_control_points = torch.Tensor(list(itertools.product( + np.arange(-r1, r1 + 0.00001, 2.0 * r1 / (grid_size_h - 1)), + np.arange(-r2, r2 + 0.00001, 2.0 * r2 / (grid_size_w - 1)), + ))) + # ipdb.set_trace() + Y, X = target_control_points.split(1, dim=1) + target_control_points = torch.cat([X, Y], dim=1) + # self.get_row(target_control_points,5) + GridLocNet = { + 'unbounded_stn': UnBoundedGridLocNet, + 'bounded_stn': BoundedGridLocNet, + }['bounded_stn'] + self.loc_net = GridLocNet( + grid_size_h, grid_size_w, target_control_points) + + self.tps = TPSGridGen(256, 192, target_control_points) + + def get_row(self, coor, num): + for j in range(num): + sum = 0 + buffer = 0 + flag = False + max = -1 + for i in range(num - 1): + differ = (coor[j * num + i + 1, :] - coor[j * num + i, :]) ** 2 + if not flag: + second_dif = 0 + flag = 
True + else: + second_dif = torch.abs(differ - buffer) + + buffer = differ + sum += second_dif + print(sum / num) + + def get_col(self, coor, num): + for i in range(num): + sum = 0 + buffer = 0 + flag = False + max = -1 + for j in range(num - 1): + differ = (coor[(j + 1) * num + i, :] - + coor[j * num + i, :]) ** 2 + if not flag: + second_dif = 0 + flag = True + else: + second_dif = torch.abs(differ-buffer) + + buffer = differ + sum += second_dif + print(sum) + + def forward(self, x, reference, mask): + batch_size = x.size(0) + source_control_points, rx, ry, cx, cy = self.loc_net(reference) + source_control_points = (source_control_points) + # print('control points',source_control_points.shape) + source_coordinate = self.tps(source_control_points) + grid = source_coordinate.view(batch_size, 256, 192, 2) + # print('grid size',grid.shape) + transformed_x = grid_sample(x, grid, canvas=0) + warped_mask = grid_sample(mask, grid, canvas=0) + return transformed_x, warped_mask, rx, ry, cx, cy diff --git a/models/pix2pixHD_model.py b/models/pix2pixHD_model.py new file mode 100644 index 0000000000000000000000000000000000000000..3ffddf455618f3b5e6b66c508a854c3fdaa78157 --- /dev/null +++ b/models/pix2pixHD_model.py @@ -0,0 +1,493 @@ +import numpy as np +import torch +import os +from torch.autograd import Variable +from util.image_pool import ImagePool +import torch.nn as nn + +import cv2 +from .base_model import BaseModel +from . import networks +import torch.nn.functional as F + +NC = 20 + + +def generate_discrete_label(inputs, label_nc, onehot=True, encode=True): + pred_batch = [] + size = inputs.size() + for input in inputs: + input = input.view(1, label_nc, size[2], size[3]) + pred = np.squeeze(input.data.max(1)[1].cpu().numpy(), axis=0) + pred_batch.append(pred) + + pred_batch = np.array(pred_batch) + pred_batch = torch.from_numpy(pred_batch) + label_map = [] + for p in pred_batch: + p = p.view(1, 256, 192) + label_map.append(p) + label_map = torch.stack(label_map, 0) + if not onehot: + return label_map.float().cuda() + size = label_map.size() + oneHot_size = (size[0], label_nc, size[2], size[3]) + input_label = torch.cuda.FloatTensor(torch.Size(oneHot_size)).zero_() + input_label = input_label.scatter_(1, label_map.data.long().cuda(), 1.0) + + return input_label + + +def morpho(mask, iter, bigger=True): + kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3)) + new = [] + for i in range(len(mask)): + tem = mask[i].cpu().detach().numpy().squeeze().reshape(256, 192, 1)*255 + tem = tem.astype(np.uint8) + if bigger: + tem = cv2.dilate(tem, kernel, iterations=iter) + else: + tem = cv2.erode(tem, kernel, iterations=iter) + tem = tem.astype(np.float64) + tem = tem.reshape(1, 256, 192) + new.append(tem.astype(np.float64)/255.0) + new = np.stack(new) + new = torch.FloatTensor(new).cuda() + return new + + +def morpho_smaller(mask, iter, bigger=True): + kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (1, 1)) + new = [] + for i in range(len(mask)): + tem = mask[i].cpu().detach().numpy().squeeze().reshape(256, 192, 1)*255 + tem = tem.astype(np.uint8) + if bigger: + tem = cv2.dilate(tem, kernel, iterations=iter) + else: + tem = cv2.erode(tem, kernel, iterations=iter) + tem = tem.astype(np.float64) + tem = tem.reshape(1, 256, 192) + new.append(tem.astype(np.float64)/255.0) + new = np.stack(new) + new = torch.FloatTensor(new).cuda() + return new + + +def encode(label_map, size): + label_nc = 14 + oneHot_size = (size[0], label_nc, size[2], size[3]) + input_label = 
torch.cuda.FloatTensor(torch.Size(oneHot_size)).zero_() + input_label = input_label.scatter_(1, label_map.data.long().cuda(), 1.0) + return input_label + + +class Pix2PixHDModel(BaseModel): + def name(self): + return 'Pix2PixHDModel' + + def init_loss_filter(self, use_gan_feat_loss, use_vgg_loss): + flags = (True, use_gan_feat_loss, use_vgg_loss, True, True) + + def loss_filter(g_gan, g_gan_feat, g_vgg, d_real, d_fake): + return [l for (l, f) in zip((g_gan, g_gan_feat, g_vgg, d_real, d_fake), flags) if f] + + return loss_filter + + def get_G(self, in_C, out_c, n_blocks, opt, L=1, S=1): + return networks.define_G(in_C, out_c, opt.ngf, opt.netG, L, S, + opt.n_downsample_global, n_blocks, opt.n_local_enhancers, + opt.n_blocks_local, opt.norm, gpu_ids=self.gpu_ids) + + def get_D(self, inc, opt): + netD = networks.define_D(inc, opt.ndf, opt.n_layers_D, opt.norm, opt.no_lsgan, + opt.num_D, not opt.no_ganFeat_loss, gpu_ids=self.gpu_ids) + return netD + + def cross_entropy2d(self, input, target, weight=None, size_average=True): + n, c, h, w = input.size() + nt, ht, wt = target.size() + + # Handle inconsistent size between input and target + if h != ht or w != wt: + input = F.interpolate(input, size=( + ht, wt), mode="bilinear", align_corners=True) + + input = input.transpose(1, 2).transpose(2, 3).contiguous().view(-1, c) + target = target.view(-1) + loss = F.cross_entropy( + input, target, weight=weight, size_average=size_average, ignore_index=250 + ) + + return loss + + def ger_average_color(self, mask, arms): + color = torch.zeros(arms.shape).cuda() + for i in range(arms.shape[0]): + count = len(torch.nonzero(mask[i, :, :, :])) + if count < 10: + color[i, 0, :, :] = 0 + color[i, 1, :, :] = 0 + color[i, 2, :, :] = 0 + + else: + color[i, 0, :, :] = arms[i, 0, :, :].sum() / count + color[i, 1, :, :] = arms[i, 1, :, :].sum() / count + color[i, 2, :, :] = arms[i, 2, :, :].sum() / count + return color + + def initialize(self, opt): + BaseModel.initialize(self, opt) + if opt.resize_or_crop != 'none' or not opt.isTrain: # when training at full res this causes OOM + torch.backends.cudnn.benchmark = True + self.isTrain = opt.isTrain + input_nc = opt.label_nc if opt.label_nc != 0 else opt.input_nc + self.count = 0 + # define networks + # Generator network + netG_input_nc = input_nc + # Main Generator + with torch.no_grad(): + self.Unet = networks.define_UnetMask(4, self.gpu_ids).eval() + self.G1 = networks.define_Refine_ResUnet(37, 14, self.gpu_ids).eval() + self.G2 = networks.define_Refine(19+18, 1, self.gpu_ids).eval() + self.G = networks.define_Refine(24, 3, self.gpu_ids).eval() + + self.tanh = nn.Tanh() + self.sigmoid = nn.Sigmoid() + self.BCE = torch.nn.BCEWithLogitsLoss() + + # Discriminator network + if self.isTrain: + use_sigmoid = opt.no_lsgan + netD_input_nc = input_nc + opt.output_nc + netB_input_nc = opt.output_nc * 2 + # self.D1 = self.get_D(17, opt) + # self.D2 = self.get_D(4, opt) + # self.D3=self.get_D(7+3,opt) + # self.D = self.get_D(20, opt) + # self.netB = networks.define_B(netB_input_nc, opt.output_nc, 32, 3, 3, opt.norm, gpu_ids=self.gpu_ids) + + if self.opt.verbose: + print('---------- Networks initialized -------------') + + # load networks + if not self.isTrain or opt.continue_train or opt.load_pretrain: + pretrained_path = '' if not self.isTrain else opt.load_pretrain + self.load_network(self.Unet, 'U', opt.which_epoch, pretrained_path) + self.load_network(self.G1, 'G1', opt.which_epoch, pretrained_path) + self.load_network(self.G2, 'G2', opt.which_epoch, pretrained_path) + 
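+            # G is the final render generator; together with U (warping U-Net), G1 (semantic
+            # label map) and G2 (clothes mask) it forms the try-on pipeline restored from checkpoints.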
self.load_network(self.G, 'G', opt.which_epoch, pretrained_path) + # set loss functions and optimizers + if self.isTrain: + if opt.pool_size > 0 and (len(self.gpu_ids)) > 1: + raise NotImplementedError( + "Fake Pool Not Implemented for MultiGPU") + self.fake_pool = ImagePool(opt.pool_size) + self.old_lr = opt.lr + + # define loss functions + self.loss_filter = self.init_loss_filter( + not opt.no_ganFeat_loss, not opt.no_vgg_loss) + + self.criterionGAN = networks.GANLoss( + use_lsgan=not opt.no_lsgan, tensor=self.Tensor) + self.criterionFeat = torch.nn.L1Loss() + if not opt.no_vgg_loss: + self.criterionVGG = networks.VGGLoss(self.gpu_ids) + self.criterionStyle = networks.StyleLoss(self.gpu_ids) + # Names so we can breakout loss + self.loss_names = self.loss_filter( + 'G_GAN', 'G_GAN_Feat', 'G_VGG', 'D_real', 'D_fake') + # initialize optimizers + # optimizer G + if opt.niter_fix_global > 0: + import sys + if sys.version_info >= (3, 0): + finetune_list = set() + else: + from sets import Set + finetune_list = Set() + + params_dict = dict(self.netG.named_parameters()) + params = [] + for key, value in params_dict.items(): + if key.startswith('model' + str(opt.n_local_enhancers)): + params += [value] + finetune_list.add(key.split('.')[0]) + print( + '------------- Only training the local enhancer ork (for %d epochs) ------------' % opt.niter_fix_global) + print('The layers that are finetuned are ', + sorted(finetune_list)) + + def encode_input(self, label_map, clothes_mask, all_clothes_label): + + size = label_map.size() + oneHot_size = (size[0], 14, size[2], size[3]) + input_label = torch.cuda.FloatTensor(torch.Size(oneHot_size)).zero_() + input_label = input_label.scatter_( + 1, label_map.data.long().cuda(), 1.0) + + masked_label = torch.cuda.FloatTensor(torch.Size(oneHot_size)).zero_() + masked_label = masked_label.scatter_( + 1, (label_map * (1 - clothes_mask)).data.long().cuda(), 1.0) + + c_label = torch.cuda.FloatTensor(torch.Size(oneHot_size)).zero_() + c_label = c_label.scatter_( + 1, all_clothes_label.data.long().cuda(), 1.0) + + input_label = Variable(input_label) + + return input_label, masked_label, c_label + + def encode_input_test(self, label_map, label_map_ref, real_image_ref, infer=False): + + if self.opt.label_nc == 0: + input_label = label_map.data.cuda() + input_label_ref = label_map_ref.data.cuda() + else: + # create one-hot vector for label map + size = label_map.size() + oneHot_size = (size[0], self.opt.label_nc, size[2], size[3]) + input_label = torch.cuda.FloatTensor( + torch.Size(oneHot_size)).zero_() + input_label = input_label.scatter_( + 1, label_map.data.long().cuda(), 1.0) + input_label_ref = torch.cuda.FloatTensor( + torch.Size(oneHot_size)).zero_() + input_label_ref = input_label_ref.scatter_( + 1, label_map_ref.data.long().cuda(), 1.0) + if self.opt.data_type == 16: + input_label = input_label.half() + input_label_ref = input_label_ref.half() + + input_label = Variable(input_label, volatile=infer) + input_label_ref = Variable(input_label_ref, volatile=infer) + real_image_ref = Variable(real_image_ref.data.cuda()) + + return input_label, input_label_ref, real_image_ref + + def discriminate(self, netD, input_label, test_image, use_pool=False): + input_concat = torch.cat((input_label, test_image.detach()), dim=1) + if use_pool: + fake_query = self.fake_pool.query(input_concat) + return netD.forward(fake_query) + else: + return netD.forward(input_concat) + + def gen_noise(self, shape): + noise = np.zeros(shape, dtype=np.uint8) + # noise + noise = cv2.randn(noise, 0, 
255) + noise = np.asarray(noise / 255, dtype=np.uint8) + noise = torch.tensor(noise, dtype=torch.float32) + return noise.cuda() + + def multi_scale_blend(self, fake_img, fake_c, mask, number=4): + alpha = [0, 0.1, 0.3, 0.6, 0.9] + smaller = mask + out = 0 + for i in range(1, number+1): + bigger = smaller + smaller = morpho(smaller, 2, False) + mid = bigger-smaller + out += mid*(alpha[i]*fake_c+(1-alpha[i])*fake_img) + out += smaller*fake_c + out += (1-mask)*fake_img + return out + + def forward(self, label, pre_clothes_mask, img_fore, clothes_mask, clothes, all_clothes_label, real_image, pose, grid, mask_fore): + # Encode Inputs + input_label, masked_label, all_clothes_label = self.encode_input( + label, clothes_mask, all_clothes_label) + arm1_mask = torch.FloatTensor( + (label.cpu().numpy() == 11).astype(np.float)).cuda() + arm2_mask = torch.FloatTensor( + (label.cpu().numpy() == 13).astype(np.float)).cuda() + pre_clothes_mask = torch.FloatTensor( + (pre_clothes_mask.detach().cpu().numpy() > 0.5).astype(np.float)).cuda() + clothes = clothes * pre_clothes_mask + + shape = pre_clothes_mask.shape + + G1_in = torch.cat([pre_clothes_mask, clothes, + all_clothes_label, pose, self.gen_noise(shape)], dim=1) + arm_label = self.G1.refine(G1_in) + + arm_label = self.sigmoid(arm_label) + CE_loss = self.cross_entropy2d( + arm_label, (label * (1 - clothes_mask)).transpose(0, 1)[0].long()) * 10 + + armlabel_map = generate_discrete_label(arm_label.detach(), 14, False) + dis_label = generate_discrete_label(arm_label.detach(), 14) + G2_in = torch.cat([pre_clothes_mask, clothes, + dis_label, pose, self.gen_noise(shape)], 1) + fake_cl = self.G2.refine(G2_in) + fake_cl = self.sigmoid(fake_cl) + CE_loss += self.BCE(fake_cl, clothes_mask) * 10 + + fake_cl_dis = torch.FloatTensor( + (fake_cl.detach().cpu().numpy() > 0.5).astype(np.float)).cuda() + fake_cl_dis = morpho(fake_cl_dis, 1, True) + + new_arm1_mask = torch.FloatTensor( + (armlabel_map.cpu().numpy() == 11).astype(np.float)).cuda() + new_arm2_mask = torch.FloatTensor( + (armlabel_map.cpu().numpy() == 13).astype(np.float)).cuda() + fake_cl_dis = fake_cl_dis*(1 - new_arm1_mask)*(1-new_arm2_mask) + fake_cl_dis *= mask_fore + + arm1_occ = clothes_mask * new_arm1_mask + arm2_occ = clothes_mask * new_arm2_mask + bigger_arm1_occ = morpho(arm1_occ, 10) + bigger_arm2_occ = morpho(arm2_occ, 10) + arm1_full = arm1_occ + (1 - clothes_mask) * arm1_mask + arm2_full = arm2_occ + (1 - clothes_mask) * arm2_mask + armlabel_map *= (1 - new_arm1_mask) + armlabel_map *= (1 - new_arm2_mask) + armlabel_map = armlabel_map * (1 - arm1_full) + arm1_full * 11 + armlabel_map = armlabel_map * (1 - arm2_full) + arm2_full * 13 + armlabel_map *= (1-fake_cl_dis) + dis_label = encode(armlabel_map, armlabel_map.shape) + + fake_c, warped, warped_mask, warped_grid = self.Unet( + clothes, fake_cl_dis, pre_clothes_mask, grid) + mask = fake_c[:, 3, :, :] + mask = self.sigmoid(mask)*fake_cl_dis + fake_c = self.tanh(fake_c[:, 0:3, :, :]) + fake_c = fake_c*(1-mask)+mask*warped + skin_color = self.ger_average_color((arm1_mask + arm2_mask - arm2_mask * arm1_mask), + (arm1_mask + arm2_mask - arm2_mask * arm1_mask) * real_image) + occlude = (1 - bigger_arm1_occ * (arm2_mask + arm1_mask+clothes_mask)) * \ + (1 - bigger_arm2_occ * (arm2_mask + arm1_mask+clothes_mask)) + img_hole_hand = img_fore * \ + (1 - clothes_mask) * occlude * (1 - fake_cl_dis) + + G_in = torch.cat([img_hole_hand, dis_label, fake_c, + skin_color, self.gen_noise(shape)], 1) + fake_image = self.G.refine(G_in.detach()) + fake_image = 
self.tanh(fake_image) + + loss_D_fake = 0 + loss_D_real = 0 + loss_G_GAN = 0 + loss_G_VGG = 0 + + L1_loss = 0 + + style_loss = L1_loss + + return [self.loss_filter(loss_G_GAN, 0, loss_G_VGG, loss_D_real, loss_D_fake), fake_image, + clothes, arm_label, L1_loss, style_loss, fake_cl, CE_loss, real_image, warped_grid] + + def inference(self, label, pre_clothes_mask, img_fore, clothes_mask, clothes, all_clothes_label, real_image, pose, grid, mask_fore): + # Encode Inputs + input_label, masked_label, all_clothes_label = self.encode_input( + label, clothes_mask, all_clothes_label) + arm1_mask = torch.FloatTensor( + (label.cpu().numpy() == 11).astype(np.float)).cuda() + arm2_mask = torch.FloatTensor( + (label.cpu().numpy() == 13).astype(np.float)).cuda() + pre_clothes_mask = torch.FloatTensor( + (pre_clothes_mask.detach().cpu().numpy() > 0.5).astype(np.float)).cuda() + clothes = clothes * pre_clothes_mask + + shape = pre_clothes_mask.shape + + G1_in = torch.cat([pre_clothes_mask, clothes, + all_clothes_label, pose, self.gen_noise(shape)], dim=1) + arm_label = self.G1.refine(G1_in) + + arm_label = self.sigmoid(arm_label) + + armlabel_map = generate_discrete_label(arm_label.detach(), 14, False) + dis_label = generate_discrete_label(arm_label.detach(), 14) + G2_in = torch.cat([pre_clothes_mask, clothes, + dis_label, pose, self.gen_noise(shape)], 1) + fake_cl = self.G2.refine(G2_in) + fake_cl = self.sigmoid(fake_cl) + + fake_cl_dis = torch.FloatTensor( + (fake_cl.detach().cpu().numpy() > 0.5).astype(np.float)).cuda() + fake_cl_dis = morpho(fake_cl_dis, 1, True) + + new_arm1_mask = torch.FloatTensor( + (armlabel_map.cpu().numpy() == 11).astype(np.float)).cuda() + new_arm2_mask = torch.FloatTensor( + (armlabel_map.cpu().numpy() == 13).astype(np.float)).cuda() + fake_cl_dis = fake_cl_dis*(1 - new_arm1_mask)*(1-new_arm2_mask) + fake_cl_dis *= mask_fore + + arm1_occ = clothes_mask * new_arm1_mask + arm2_occ = clothes_mask * new_arm2_mask + bigger_arm1_occ = morpho(arm1_occ, 10) + bigger_arm2_occ = morpho(arm2_occ, 10) + arm1_full = arm1_occ + (1 - clothes_mask) * arm1_mask + arm2_full = arm2_occ + (1 - clothes_mask) * arm2_mask + armlabel_map *= (1 - new_arm1_mask) + armlabel_map *= (1 - new_arm2_mask) + armlabel_map = armlabel_map * (1 - arm1_full) + arm1_full * 11 + armlabel_map = armlabel_map * (1 - arm2_full) + arm2_full * 13 + armlabel_map *= (1-fake_cl_dis) + dis_label = encode(armlabel_map, armlabel_map.shape) + + fake_c, warped, warped_mask, warped_grid = self.Unet( + clothes, fake_cl_dis, pre_clothes_mask, grid) + mask = fake_c[:, 3, :, :] + mask = self.sigmoid(mask)*fake_cl_dis + fake_c = self.tanh(fake_c[:, 0:3, :, :]) + fake_c = fake_c*(1-mask)+mask*warped + skin_color = self.ger_average_color((arm1_mask + arm2_mask - arm2_mask * arm1_mask), + (arm1_mask + arm2_mask - arm2_mask * arm1_mask) * real_image) + occlude = (1 - bigger_arm1_occ * (arm2_mask + arm1_mask+clothes_mask)) * \ + (1 - bigger_arm2_occ * (arm2_mask + arm1_mask+clothes_mask)) + img_hole_hand = img_fore * \ + (1 - clothes_mask) * occlude * (1 - fake_cl_dis) + + G_in = torch.cat([img_hole_hand, dis_label, fake_c, + skin_color, self.gen_noise(shape)], 1) + fake_image = self.G.refine(G_in.detach()) + fake_image = self.tanh(fake_image) + + return [fake_image, warped, fake_c] + + def save(self, which_epoch): + # self.save_network(self.Unet, 'U', which_epoch, self.gpu_ids) + # self.save_network(self.G, 'G', which_epoch, self.gpu_ids) + # self.save_network(self.G1, 'G1', which_epoch, self.gpu_ids) + # self.save_network(self.G2, 'G2', 
which_epoch, self.gpu_ids) + # # self.save_network(self.G3, 'G3', which_epoch, self.gpu_ids) + # self.save_network(self.D, 'D', which_epoch, self.gpu_ids) + # self.save_network(self.D1, 'D1', which_epoch, self.gpu_ids) + # self.save_network(self.D2, 'D2', which_epoch, self.gpu_ids) + # self.save_network(self.D3, 'D3', which_epoch, self.gpu_ids) + + pass + + # self.save_network(self.netB, 'B', which_epoch, self.gpu_ids) + + def update_fixed_params(self): + # after fixing the global generator for a number of iterations, also start finetuning it + params = list(self.netG.parameters()) + if self.gen_features: + params += list(self.netE.parameters()) + self.optimizer_G = torch.optim.Adam( + params, lr=self.opt.lr, betas=(self.opt.beta1, 0.999)) + if self.opt.verbose: + print('------------ Now also finetuning global generator -----------') + + def update_learning_rate(self): + lrd = self.opt.lr / self.opt.niter_decay + lr = self.old_lr - lrd + for param_group in self.optimizer_D.param_groups: + param_group['lr'] = lr + for param_group in self.optimizer_G.param_groups: + param_group['lr'] = lr + if self.opt.verbose: + print('update learning rate: %f -> %f' % (self.old_lr, lr)) + self.old_lr = lr + + +class InferenceModel(Pix2PixHDModel): + def forward(self, label, pre_clothes_mask, img_fore, clothes_mask, clothes, all_clothes_label, real_image, pose, grid, mask_fore): + return self.inference(label, pre_clothes_mask, img_fore, clothes_mask, clothes, all_clothes_label, real_image, pose, grid, mask_fore) diff --git a/models/test.py b/models/test.py new file mode 100644 index 0000000000000000000000000000000000000000..73ac80667478c93e02c3e5483899b63d5b53f873 --- /dev/null +++ b/models/test.py @@ -0,0 +1,7 @@ +import numpy +num = 5 +for i in range(num): + row = [] + print() + for i in range(num): + print(j * num + i) diff --git a/options/base_options.py b/options/base_options.py new file mode 100644 index 0000000000000000000000000000000000000000..a932ef7da2b6da110d6c989252979c4400925bc7 --- /dev/null +++ b/options/base_options.py @@ -0,0 +1,125 @@ +import argparse +import os +from util import util +import torch + + +class BaseOptions(): + def __init__(self): + self.parser = argparse.ArgumentParser() + self.initialized = False + + def initialize(self): + # experiment specifics + self.parser.add_argument('--name', type=str, default='label2city', + help='name of the experiment. It decides where to store samples and models') + self.parser.add_argument('--gpu_ids', type=str, default='0', + help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU') + self.parser.add_argument('--checkpoints_dir', type=str, + default='./checkpoints', help='models are saved here') + self.parser.add_argument( + '--model', type=str, default='pix2pixHD', help='which model to use') + self.parser.add_argument('--norm', type=str, default='instance', + help='instance normalization or batch normalization') + self.parser.add_argument( + '--use_dropout', action='store_true', help='use dropout for the generator') + self.parser.add_argument('--data_type', default=32, type=int, choices=[ + 8, 16, 32], help="Supported data type i.e. 
8, 16, 32 bit") + self.parser.add_argument( + '--verbose', action='store_true', default=False, help='toggles verbose') + + # input/output sizes + self.parser.add_argument( + '--batchSize', type=int, default=1, help='input batch size') + self.parser.add_argument( + '--loadSize', type=int, default=512, help='scale images to this size') + self.parser.add_argument( + '--fineSize', type=int, default=512, help='then crop to this size') + self.parser.add_argument( + '--label_nc', type=int, default=20, help='# of input label channels') + self.parser.add_argument( + '--input_nc', type=int, default=3, help='# of input image channels') + self.parser.add_argument( + '--output_nc', type=int, default=3, help='# of output image channels') + + # for setting inputs + self.parser.add_argument( + '--dataroot', type=str, default='Data_preprocessing/') + self.parser.add_argument('--datapairs', type=str, default='test_pairs.txt', + help='train_pairs.txt/test_pairs.txt/test_pairs_same.txt etc.') + + self.parser.add_argument('--resize_or_crop', type=str, default='scale_width', + help='scaling and cropping of images at load time [resize_and_crop|crop|scale_width|scale_width_and_crop]') + ''' + self.parser.add_argument('--serial_batches', action='store_true', + help='if true, takes images in order to make batches, otherwise takes them randomly') + ''' + self.parser.add_argument('--no_flip', action='store_true', + help='if specified, do not flip the images for data argumentation') + self.parser.add_argument( + '--nThreads', default=1, type=int, help='# threads for loading data') + self.parser.add_argument('--max_dataset_size', type=int, default=float( + "inf"), help='Maximum number of samples allowed per dataset. If the dataset directory contains more than max_dataset_size, only a subset is loaded.') + + # for displays + self.parser.add_argument( + '--display_winsize', type=int, default=512, help='display window size') + self.parser.add_argument('--tf_log', action='store_true', + help='if specified, use tensorboard logging. 
Requires tensorflow installed') + + # for generator + self.parser.add_argument( + '--netG', type=str, default='global', help='selects model to use for netG') + self.parser.add_argument( + '--ngf', type=int, default=64, help='# of gen filters in first conv layer') + self.parser.add_argument('--n_downsample_global', type=int, + default=4, help='number of downsampling layers in netG') + self.parser.add_argument('--n_blocks_global', type=int, default=4, + help='number of residual blocks in the global generator network') + self.parser.add_argument('--n_blocks_local', type=int, default=3, + help='number of residual blocks in the local enhancer network') + self.parser.add_argument( + '--n_local_enhancers', type=int, default=1, help='number of local enhancers to use') + self.parser.add_argument('--niter_fix_global', type=int, default=0, + help='number of epochs that we only train the outmost local enhancer') + + self.parser.add_argument('--continue_train', action='store_true', + help='continue training: load the latest model') + + self.initialized = True + + def parse(self, save=True): + if not self.initialized: + self.initialize() + self.opt = self.parser.parse_args() + self.opt.isTrain = self.isTrain # train or test + + str_ids = self.opt.gpu_ids.split(',') + self.opt.gpu_ids = [] + for str_id in str_ids: + id = int(str_id) + if id >= 0: + self.opt.gpu_ids.append(id) + + # set gpu ids + if len(self.opt.gpu_ids) > 0: + torch.cuda.set_device(self.opt.gpu_ids[0]) + + args = vars(self.opt) + + print('------------ Options -------------') + for k, v in sorted(args.items()): + print('%s: %s' % (str(k), str(v))) + print('-------------- End ----------------') + + # save to the disk + expr_dir = os.path.join(self.opt.checkpoints_dir, self.opt.name) + util.mkdirs(expr_dir) + if save and not self.opt.continue_train: + file_name = os.path.join(expr_dir, 'opt.txt') + with open(file_name, 'wt') as opt_file: + opt_file.write('------------ Options -------------\n') + for k, v in sorted(args.items()): + opt_file.write('%s: %s\n' % (str(k), str(v))) + opt_file.write('-------------- End ----------------\n') + return self.opt diff --git a/options/test_options.py b/options/test_options.py new file mode 100644 index 0000000000000000000000000000000000000000..88a50551a110f275ccd472cbc60e283e756ef638 --- /dev/null +++ b/options/test_options.py @@ -0,0 +1,31 @@ +from .base_options import BaseOptions + + +class TestOptions(BaseOptions): + def initialize(self): + BaseOptions.initialize(self) + self.parser.add_argument( + '--ntest', type=int, default=float("inf"), help='# of test examples.') + self.parser.add_argument( + '--results_dir', type=str, default='./results/', help='saves results here.') + self.parser.add_argument( + '--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images') + self.parser.add_argument( + '--phase', type=str, default='test', help='train, val, test, etc') + self.parser.add_argument('--which_epoch', type=str, default='latest', + help='which epoch to load? 
set to latest to use latest cached model') + self.parser.add_argument( + '--how_many', type=int, default=1000, help='how many test images to run') + self.parser.add_argument('--serial_batches', action='store_false', + help='if true, takes images in order to make batches, otherwise takes them randomly') + self.parser.add_argument('--cluster_path', type=str, default='features_clustered_010.npy', + help='the path for clustered results of encoded features') + self.parser.add_argument('--use_encoded_image', action='store_true', + help='if specified, encode the real image to get the feature map') + self.parser.add_argument( + "--export_onnx", type=str, help="export ONNX model to a given file") + self.parser.add_argument("--engine", type=str, + help="run serialized TRT engine") + self.parser.add_argument( + "--onnx", type=str, help="run ONNX model via TRT") + self.isTrain = False diff --git a/options/train_options.py b/options/train_options.py new file mode 100644 index 0000000000000000000000000000000000000000..da707e674a3a8c1f0e2e0c7d20099ece09d17b37 --- /dev/null +++ b/options/train_options.py @@ -0,0 +1,57 @@ +from .base_options import BaseOptions + + +class TrainOptions(BaseOptions): + def initialize(self): + BaseOptions.initialize(self) + # for displays + self.parser.add_argument('--display_freq', type=int, default=100, + help='frequency of showing training results on screen') + self.parser.add_argument('--print_freq', type=int, default=100, + help='frequency of showing training results on console') + self.parser.add_argument('--save_latest_freq', type=int, + default=1000, help='frequency of saving the latest results') + self.parser.add_argument('--save_epoch_freq', type=int, default=10, + help='frequency of saving checkpoints at the end of epochs') + self.parser.add_argument('--no_html', action='store_true', + help='do not save intermediate training results to [opt.checkpoints_dir]/[opt.name]/web/') + self.parser.add_argument('--debug', action='store_true', + help='only do one epoch and displays at each iteration') + + # for training + self.parser.add_argument('--load_pretrain', type=str, default='./checkpoints/label2city', + help='load the pretrained model from the specified location') + self.parser.add_argument('--which_epoch', type=str, default='latest', + help='which epoch to load? 
set to latest to use latest cached model') + self.parser.add_argument( + '--phase', type=str, default='test', help='train, val, test, etc') + self.parser.add_argument('--serial_batches', action='store_true', + help='if true, takes images in order to make batches, otherwise takes them randomly') + self.parser.add_argument( + '--niter', type=int, default=100, help='# of iter at starting learning rate') + self.parser.add_argument('--niter_decay', type=int, default=100, + help='# of iter to linearly decay learning rate to zero') + self.parser.add_argument( + '--beta1', type=float, default=0.5, help='momentum term of adam') + self.parser.add_argument( + '--lr', type=float, default=0.0002, help='initial learning rate for adam') + + # for discriminators + self.parser.add_argument( + '--num_D', type=int, default=2, help='number of discriminators to use') + self.parser.add_argument( + '--n_layers_D', type=int, default=3, help='only used if which_model_netD==n_layers') + self.parser.add_argument( + '--ndf', type=int, default=64, help='# of discrim filters in first conv layer') + self.parser.add_argument( + '--lambda_feat', type=float, default=10.0, help='weight for feature matching loss') + self.parser.add_argument('--no_ganFeat_loss', action='store_true', + help='if specified, do *not* use discriminator feature matching loss') + self.parser.add_argument('--no_vgg_loss', action='store_true', + help='if specified, do *not* use VGG feature matching loss') + self.parser.add_argument('--no_lsgan', action='store_true', + help='do *not* use least square GAN, if false, use vanilla GAN') + self.parser.add_argument('--pool_size', type=int, default=0, + help='the size of image buffer that stores previously generated images') + + self.isTrain = True diff --git a/predict_pose.py b/predict_pose.py new file mode 100644 index 0000000000000000000000000000000000000000..ba9cd450d5c437673468c138af1b9e4dc339b4a1 --- /dev/null +++ b/predict_pose.py @@ -0,0 +1,105 @@ +import cv2 +import numpy as np +import os +import json + +class general_pose_model(object): + def __init__(self, modelpath): + # Specify the model to be used + # Body25: 25 points + # COCO: 18 points + # MPI: 15 points + self.inWidth = 368 + self.inHeight = 368 + self.threshold = 0.05 + self.pose_net = self.general_coco_model(modelpath) + + def general_coco_model(self, modelpath): + self.points_name = { + "Nose": 0, "Neck": 1, + "RShoulder": 2, "RElbow": 3, "RWrist": 4, + "LShoulder": 5, "LElbow": 6, "LWrist": 7, + "RHip": 8, "RKnee": 9, "RAnkle": 10, + "LHip": 11, "LKnee": 12, "LAnkle": 13, + "REye": 14, "LEye": 15, + "REar": 16, "LEar": 17, + "Background": 18} + self.num_points = 18 + self.point_pairs = [[1, 0], [1, 2], [1, 5], + [2, 3], [3, 4], [5, 6], + [6, 7], [1, 8], [8, 9], + [9, 10], [1, 11], [11, 12], + [12, 13], [0, 14], [0, 15], + [14, 16], [15, 17]] + prototxt = os.path.join( + modelpath, + 'pose_deploy_linevec.prototxt') + caffemodel = os.path.join( + modelpath, + 'pose_iter_440000.caffemodel') + coco_model = cv2.dnn.readNetFromCaffe(prototxt, caffemodel) + + return coco_model + + def predict(self, imgfile): + img_cv2 = cv2.imread(imgfile) + img_height, img_width, _ = img_cv2.shape + inpBlob = cv2.dnn.blobFromImage(img_cv2, + 1.0 / 255, + (self.inWidth, self.inHeight), + (0, 0, 0), + swapRB=False, + crop=False) + self.pose_net.setInput(inpBlob) + self.pose_net.setPreferableBackend(cv2.dnn.DNN_BACKEND_OPENCV) + self.pose_net.setPreferableTarget(cv2.dnn.DNN_TARGET_OPENCL) + + output = self.pose_net.forward() + + H = output.shape[2] + W = 
output.shape[3] + + points = [] + for idx in range(self.num_points): + probMap = output[0, idx, :, :] # confidence map. + + # Find global maxima of the probMap. + minVal, prob, minLoc, point = cv2.minMaxLoc(probMap) + + # Scale the point to fit on the original image + x = (img_width * point[0]) / W + y = (img_height * point[1]) / H + + if prob > self.threshold: + points.append(x) + points.append(y) + points.append(prob) + else: + points.append(0) + points.append(0) + points.append(0) + + return points + +def generate_pose_keypoints(img_file, pose_file): + + modelpath = 'pose' + pose_model = general_pose_model(modelpath) + + res_points = pose_model.predict(img_file) + + pose_data = {"version": 1, + "people": [ + {"pose_keypoints": res_points} + ] + } + + pose_keypoints_path = pose_file + + json_object = json.dumps(pose_data, indent = 4) + + # Writing to sample.json + with open(pose_keypoints_path, "w") as outfile: + outfile.write(json_object) + print('File saved at {}'.format(pose_keypoints_path)) + diff --git a/rembg/__init__.py b/rembg/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..afdc5405843f1d962cef9f0165280385c1b45ab3 --- /dev/null +++ b/rembg/__init__.py @@ -0,0 +1,11 @@ +import sys +import warnings + +if not (sys.version_info.major == 3 and sys.version_info.minor == 9): + warnings.warn("This library is only for Python 3.9", RuntimeWarning) + +from . import _version + +__version__ = _version.get_versions()["version"] + +from .bg import remove diff --git a/rembg/_version.py b/rembg/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..429a70b15299225c4f7768187b76c107628d0501 --- /dev/null +++ b/rembg/_version.py @@ -0,0 +1,677 @@ +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys +from typing import Callable, Dict + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
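# Illustrative sketch (not part of this patch): the pose file written by
# generate_pose_keypoints() above holds a single person whose "pose_keypoints"
# entry is a flat list of the 18 COCO joints as (x, y, confidence) triplets,
# with below-threshold joints zeroed out. Reading it back might look like this
# (the file name "person_keypoints.json" is only an example):
import json

import numpy as np

with open("person_keypoints.json") as f:
    pose_data = json.load(f)

# reshape the flat list into an (18, 3) array of x, y, confidence rows
keypoints = np.array(pose_data["people"][0]["pose_keypoints"]).reshape(-1, 3)
for name, idx in [("Nose", 0), ("Neck", 1), ("RShoulder", 2)]:
    x, y, conf = keypoints[idx]
    print(f"{name}: ({x:.1f}, {y:.1f}) confidence {conf:.2f}")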
+ git_refnames = " (HEAD -> main)" + git_full = "a11480edac808cafa8fa86673ef87a284756b519" + git_date = "2022-08-06 03:59:55 -0300" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "v" + cfg.parentdir_prefix = "rembg-" + cfg.versionfile_source = "rembg/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) + break + except OSError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
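# Illustrative sketch (not part of this patch): in a git-archive export the
# keyword values above are expanded via the export-subst attribute, e.g.
# git_refnames = " (HEAD -> main, tag: v2.0.30)"; in a plain checkout they would
# still read "$Format:%d$" and be rejected later as unexpanded. The same regexp
# used by git_get_keywords() can be exercised on its own (sample values assumed):
import re

sample_line = 'git_refnames = " (HEAD -> main, tag: v2.0.30)"'
mo = re.search(r'=\s*"(.*)"', sample_line)
if mo:
    refnames = mo.group(1).strip()
    refs = {r.strip() for r in refnames.strip("()").split(",")}
    tags = {r[len("tag: "):] for r in refs if r.startswith("tag: ")}
    print(tags)  # {'v2.0.30'}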
+ keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r"\d", r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. 
+ + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + TAG_PREFIX_REGEX = "*" + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + TAG_PREFIX_REGEX = r"\*" + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s%s" % (tag_prefix, TAG_PREFIX_REGEX), + ], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? 
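# Illustrative sketch (not part of this patch): once a trailing "-dirty" has been
# stripped, a describe string such as "v2.0.30-12-gabc1234" (closest tag, commits
# since that tag, abbreviated hash) matches the regexp above; anything else falls
# through to this error branch. Standalone, with a made-up tag:
import re

git_describe = "v2.0.30-12-gabc1234"
mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
if mo:
    full_tag, distance, short = mo.group(1), int(mo.group(2)), mo.group(3)
    print(full_tag, distance, short)  # v2.0.30 12 abc1234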
+ pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 
0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
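# Illustrative sketch (not part of this patch): with cfg.versionfile_source set
# to "rembg/_version.py" (two path components), the loop below strips two
# directory levels off this file's absolute path to reach the repository root,
# where the .git directory is expected to live. The same walk, standalone (the
# absolute path is only an example):
import os

root = "/opt/app/rembg/_version.py"
for _ in "rembg/_version.py".split("/"):
    root = os.path.dirname(root)
print(root)  # /opt/app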
+ for _ in cfg.versionfile_source.split("/"): + root = os.path.dirname(root) + except NameError: + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None, + } + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } diff --git a/rembg/bg.py b/rembg/bg.py new file mode 100644 index 0000000000000000000000000000000000000000..57ac6b591b1af37b7ffb3959033409bc5a6e9ef3 --- /dev/null +++ b/rembg/bg.py @@ -0,0 +1,176 @@ +import io +from enum import Enum +from typing import List, Optional, Union + +import numpy as np +from cv2 import ( + BORDER_DEFAULT, + MORPH_ELLIPSE, + MORPH_OPEN, + GaussianBlur, + getStructuringElement, + morphologyEx, +) +from PIL import Image +from PIL.Image import Image as PILImage +from pymatting.alpha.estimate_alpha_cf import estimate_alpha_cf +from pymatting.foreground.estimate_foreground_ml import estimate_foreground_ml +from pymatting.util.util import stack_images +from scipy.ndimage.morphology import binary_erosion + +from .session_base import BaseSession +from .session_factory import new_session + +kernel = getStructuringElement(MORPH_ELLIPSE, (3, 3)) + + +class ReturnType(Enum): + BYTES = 0 + PILLOW = 1 + NDARRAY = 2 + + +def alpha_matting_cutout( + img: PILImage, + mask: PILImage, + foreground_threshold: int, + background_threshold: int, + erode_structure_size: int, +) -> PILImage: + + if img.mode == "RGBA" or img.mode == "CMYK": + img = img.convert("RGB") + + img = np.asarray(img) + mask = np.asarray(mask) + + is_foreground = mask > foreground_threshold + is_background = mask < background_threshold + + structure = None + if erode_structure_size > 0: + structure = np.ones( + (erode_structure_size, erode_structure_size), dtype=np.uint8 + ) + + is_foreground = binary_erosion(is_foreground, structure=structure) + is_background = binary_erosion(is_background, structure=structure, border_value=1) + + trimap = np.full(mask.shape, dtype=np.uint8, fill_value=128) + trimap[is_foreground] = 255 + trimap[is_background] = 0 + + img_normalized = img / 255.0 + trimap_normalized = trimap / 255.0 + + alpha = estimate_alpha_cf(img_normalized, trimap_normalized) + foreground = estimate_foreground_ml(img_normalized, alpha) + cutout = stack_images(foreground, alpha) + + cutout = np.clip(cutout * 255, 0, 255).astype(np.uint8) + cutout = Image.fromarray(cutout) + + return cutout + + +def naive_cutout(img: PILImage, mask: PILImage) -> PILImage: + empty = Image.new("RGBA", (img.size), 0) + cutout = Image.composite(img, empty, mask) + return cutout + + +def get_concat_v_multi(imgs: List[PILImage]) -> PILImage: + pivot = imgs.pop(0) + for im in imgs: + pivot = get_concat_v(pivot, im) + return pivot + + +def get_concat_v(img1: PILImage, img2: PILImage) -> PILImage: + dst = Image.new("RGBA", (img1.width, img1.height + img2.height)) + dst.paste(img1, (0, 0)) + dst.paste(img2, (0, img1.height)) + return dst + + +def post_process(mask: np.ndarray) -> np.ndarray: + """ + Post Process the mask for a smooth boundary by applying Morphological Operations + Research based on paper: https://www.sciencedirect.com/science/article/pii/S2352914821000757 + args: + mask: 
Binary Numpy Mask + """ + mask = morphologyEx(mask, MORPH_OPEN, kernel) + mask = GaussianBlur(mask, (5, 5), sigmaX=2, sigmaY=2, borderType=BORDER_DEFAULT) + mask = np.where(mask < 127, 0, 255).astype(np.uint8) # convert again to binary + return mask + + +def remove( + data: Union[bytes, PILImage, np.ndarray], + alpha_matting: bool = False, + alpha_matting_foreground_threshold: int = 240, + alpha_matting_background_threshold: int = 10, + alpha_matting_erode_size: int = 10, + session: Optional[BaseSession] = None, + only_mask: bool = False, + post_process_mask: bool = False, +) -> Union[bytes, PILImage, np.ndarray]: + + if isinstance(data, PILImage): + return_type = ReturnType.PILLOW + img = data + elif isinstance(data, bytes): + return_type = ReturnType.BYTES + img = Image.open(io.BytesIO(data)) + elif isinstance(data, np.ndarray): + return_type = ReturnType.NDARRAY + img = Image.fromarray(data) + else: + raise ValueError("Input type {} is not supported.".format(type(data))) + + if session is None: + session = new_session("u2net") + + masks = session.predict(img) + cutouts = [] + + for mask in masks: + if post_process_mask: + mask = Image.fromarray(post_process(np.array(mask))) + + if only_mask: + cutout = mask + + elif alpha_matting: + try: + cutout = alpha_matting_cutout( + img, + mask, + alpha_matting_foreground_threshold, + alpha_matting_background_threshold, + alpha_matting_erode_size, + ) + except ValueError: + cutout = naive_cutout(img, mask) + + else: + cutout = naive_cutout(img, mask) + + cutouts.append(cutout) + + cutout = img + if len(cutouts) > 0: + cutout = get_concat_v_multi(cutouts) + + if ReturnType.PILLOW == return_type: + return cutout + + if ReturnType.NDARRAY == return_type: + return np.asarray(cutout) + + bio = io.BytesIO() + cutout.save(bio, "PNG") + bio.seek(0) + + return bio.read() diff --git a/rembg/cli.py b/rembg/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..6210ef7c10c73ba38a9fa997df4efeb07b08987a --- /dev/null +++ b/rembg/cli.py @@ -0,0 +1,419 @@ +import pathlib +import sys +import time +from enum import Enum +from typing import IO, cast + +import aiohttp +import click +import filetype +import uvicorn +from asyncer import asyncify +from fastapi import Depends, FastAPI, File, Form, Query +from fastapi.middleware.cors import CORSMiddleware +from starlette.responses import Response +from tqdm import tqdm +from watchdog.events import FileSystemEvent, FileSystemEventHandler +from watchdog.observers import Observer + +from . 
import _version +from .bg import remove +from .session_base import BaseSession +from .session_factory import new_session + + +@click.group() +@click.version_option(version=_version.get_versions()["version"]) +def main() -> None: + pass + + +@main.command(help="for a file as input") +@click.option( + "-m", + "--model", + default="u2net", + type=click.Choice(["u2net", "u2netp", "u2net_human_seg", "u2net_cloth_seg"]), + show_default=True, + show_choices=True, + help="model name", +) +@click.option( + "-a", + "--alpha-matting", + is_flag=True, + show_default=True, + help="use alpha matting", +) +@click.option( + "-af", + "--alpha-matting-foreground-threshold", + default=240, + type=int, + show_default=True, + help="trimap fg threshold", +) +@click.option( + "-ab", + "--alpha-matting-background-threshold", + default=10, + type=int, + show_default=True, + help="trimap bg threshold", +) +@click.option( + "-ae", + "--alpha-matting-erode-size", + default=10, + type=int, + show_default=True, + help="erode size", +) +@click.option( + "-om", + "--only-mask", + is_flag=True, + show_default=True, + help="output only the mask", +) +@click.option( + "-ppm", + "--post-process-mask", + is_flag=True, + show_default=True, + help="post process the mask", +) +@click.argument( + "input", default=(None if sys.stdin.isatty() else "-"), type=click.File("rb") +) +@click.argument( + "output", + default=(None if sys.stdin.isatty() else "-"), + type=click.File("wb", lazy=True), +) +def i(model: str, input: IO, output: IO, **kwargs) -> None: + output.write(remove(input.read(), session=new_session(model), **kwargs)) + + +@main.command(help="for a folder as input") +@click.option( + "-m", + "--model", + default="u2net", + type=click.Choice(["u2net", "u2netp", "u2net_human_seg", "u2net_cloth_seg"]), + show_default=True, + show_choices=True, + help="model name", +) +@click.option( + "-a", + "--alpha-matting", + is_flag=True, + show_default=True, + help="use alpha matting", +) +@click.option( + "-af", + "--alpha-matting-foreground-threshold", + default=240, + type=int, + show_default=True, + help="trimap fg threshold", +) +@click.option( + "-ab", + "--alpha-matting-background-threshold", + default=10, + type=int, + show_default=True, + help="trimap bg threshold", +) +@click.option( + "-ae", + "--alpha-matting-erode-size", + default=10, + type=int, + show_default=True, + help="erode size", +) +@click.option( + "-om", + "--only-mask", + is_flag=True, + show_default=True, + help="output only the mask", +) +@click.option( + "-ppm", + "--post-process-mask", + is_flag=True, + show_default=True, + help="post process the mask", +) +@click.option( + "-w", + "--watch", + default=False, + is_flag=True, + show_default=True, + help="watches a folder for changes", +) +@click.argument( + "input", + type=click.Path( + exists=True, + path_type=pathlib.Path, + file_okay=False, + dir_okay=True, + readable=True, + ), +) +@click.argument( + "output", + type=click.Path( + exists=False, + path_type=pathlib.Path, + file_okay=False, + dir_okay=True, + writable=True, + ), +) +def p( + model: str, input: pathlib.Path, output: pathlib.Path, watch: bool, **kwargs +) -> None: + session = new_session(model) + + def process(each_input: pathlib.Path) -> None: + try: + mimetype = filetype.guess(each_input) + if mimetype is None: + return + if mimetype.mime.find("image") < 0: + return + + each_output = (output / each_input.name).with_suffix(".png") + each_output.parents[0].mkdir(parents=True, exist_ok=True) + + if not each_output.exists(): + 
each_output.write_bytes( + cast( + bytes, + remove(each_input.read_bytes(), session=session, **kwargs), + ) + ) + + if watch: + print( + f"processed: {each_input.absolute()} -> {each_output.absolute()}" + ) + except Exception as e: + print(e) + + inputs = list(input.glob("**/*")) + if not watch: + inputs = tqdm(inputs) + + for each_input in inputs: + if not each_input.is_dir(): + process(each_input) + + if watch: + observer = Observer() + + class EventHandler(FileSystemEventHandler): + def on_any_event(self, event: FileSystemEvent) -> None: + if not ( + event.is_directory or event.event_type in ["deleted", "closed"] + ): + process(pathlib.Path(event.src_path)) + + event_handler = EventHandler() + observer.schedule(event_handler, input, recursive=False) + observer.start() + + try: + while True: + time.sleep(1) + + finally: + observer.stop() + observer.join() + + +@main.command(help="for a http server") +@click.option( + "-p", + "--port", + default=5000, + type=int, + show_default=True, + help="port", +) +@click.option( + "-l", + "--log_level", + default="info", + type=str, + show_default=True, + help="log level", +) +def s(port: int, log_level: str) -> None: + sessions: dict[str, BaseSession] = {} + tags_metadata = [ + { + "name": "Background Removal", + "description": "Endpoints that perform background removal with different image sources.", + "externalDocs": { + "description": "GitHub Source", + "url": "https://github.com/danielgatis/rembg", + }, + }, + ] + app = FastAPI( + title="Rembg", + description="Rembg is a tool to remove images background. That is it.", + version=_version.get_versions()["version"], + contact={ + "name": "Daniel Gatis", + "url": "https://github.com/danielgatis", + "email": "danielgatis@gmail.com", + }, + license_info={ + "name": "MIT License", + "url": "https://github.com/danielgatis/rembg/blob/main/LICENSE.txt", + }, + openapi_tags=tags_metadata, + ) + + app.add_middleware( + CORSMiddleware, + allow_credentials=True, + allow_origins=["*"], + allow_methods=["*"], + allow_headers=["*"], + ) + + class ModelType(str, Enum): + u2net = "u2net" + u2netp = "u2netp" + u2net_human_seg = "u2net_human_seg" + u2net_cloth_seg = "u2net_cloth_seg" + + class CommonQueryParams: + def __init__( + self, + model: ModelType = Query( + default=ModelType.u2net, + description="Model to use when processing image", + ), + a: bool = Query(default=False, description="Enable Alpha Matting"), + af: int = Query( + default=240, + ge=0, + le=255, + description="Alpha Matting (Foreground Threshold)", + ), + ab: int = Query( + default=10, + ge=0, + le=255, + description="Alpha Matting (Background Threshold)", + ), + ae: int = Query( + default=10, ge=0, description="Alpha Matting (Erode Structure Size)" + ), + om: bool = Query(default=False, description="Only Mask"), + ppm: bool = Query(default=False, description="Post Process Mask"), + ): + self.model = model + self.a = a + self.af = af + self.ab = ab + self.ae = ae + self.om = om + self.ppm = ppm + + class CommonQueryPostParams: + def __init__( + self, + model: ModelType = Form( + default=ModelType.u2net, + description="Model to use when processing image", + ), + a: bool = Form(default=False, description="Enable Alpha Matting"), + af: int = Form( + default=240, + ge=0, + le=255, + description="Alpha Matting (Foreground Threshold)", + ), + ab: int = Form( + default=10, + ge=0, + le=255, + description="Alpha Matting (Background Threshold)", + ), + ae: int = Form( + default=10, ge=0, description="Alpha Matting (Erode Structure Size)" + ), + om: bool = 
Form(default=False, description="Only Mask"), + ppm: bool = Form(default=False, description="Post Process Mask"), + ): + self.model = model + self.a = a + self.af = af + self.ab = ab + self.ae = ae + self.om = om + self.ppm = ppm + + def im_without_bg(content: bytes, commons: CommonQueryParams) -> Response: + return Response( + remove( + content, + session=sessions.setdefault( + commons.model.value, new_session(commons.model.value) + ), + alpha_matting=commons.a, + alpha_matting_foreground_threshold=commons.af, + alpha_matting_background_threshold=commons.ab, + alpha_matting_erode_size=commons.ae, + only_mask=commons.om, + post_process_mask=commons.ppm, + ), + media_type="image/png", + ) + + @app.get( + path="/", + tags=["Background Removal"], + summary="Remove from URL", + description="Removes the background from an image obtained by retrieving an URL.", + ) + async def get_index( + url: str = Query( + default=..., description="URL of the image that has to be processed." + ), + commons: CommonQueryParams = Depends(), + ): + async with aiohttp.ClientSession() as session: + async with session.get(url) as response: + file = await response.read() + return await asyncify(im_without_bg)(file, commons) + + @app.post( + path="/", + tags=["Background Removal"], + summary="Remove from Stream", + description="Removes the background from an image sent within the request itself.", + ) + async def post_index( + file: bytes = File( + default=..., + description="Image file (byte stream) that has to be processed.", + ), + commons: CommonQueryPostParams = Depends(), + ): + return await asyncify(im_without_bg)(file, commons) + + uvicorn.run(app, host="0.0.0.0", port=port, log_level=log_level) diff --git a/rembg/session_base.py b/rembg/session_base.py new file mode 100644 index 0000000000000000000000000000000000000000..aa98693bc299f673fe6220f18b4b6d20c2c87d3a --- /dev/null +++ b/rembg/session_base.py @@ -0,0 +1,40 @@ +from typing import Dict, List, Tuple + +import numpy as np +import onnxruntime as ort +from PIL import Image +from PIL.Image import Image as PILImage + + +class BaseSession: + def __init__(self, model_name: str, inner_session: ort.InferenceSession): + self.model_name = model_name + self.inner_session = inner_session + + def normalize( + self, + img: PILImage, + mean: Tuple[float, float, float], + std: Tuple[float, float, float], + size: Tuple[int, int], + ) -> Dict[str, np.ndarray]: + im = img.convert("RGB").resize(size, Image.LANCZOS) + + im_ary = np.array(im) + im_ary = im_ary / np.max(im_ary) + + tmpImg = np.zeros((im_ary.shape[0], im_ary.shape[1], 3)) + tmpImg[:, :, 0] = (im_ary[:, :, 0] - mean[0]) / std[0] + tmpImg[:, :, 1] = (im_ary[:, :, 1] - mean[1]) / std[1] + tmpImg[:, :, 2] = (im_ary[:, :, 2] - mean[2]) / std[2] + + tmpImg = tmpImg.transpose((2, 0, 1)) + + return { + self.inner_session.get_inputs()[0] + .name: np.expand_dims(tmpImg, 0) + .astype(np.float32) + } + + def predict(self, img: PILImage) -> List[PILImage]: + raise NotImplementedError diff --git a/rembg/session_cloth.py b/rembg/session_cloth.py new file mode 100644 index 0000000000000000000000000000000000000000..11bcef74378be4d64058772c29ac45240f60a85b --- /dev/null +++ b/rembg/session_cloth.py @@ -0,0 +1,88 @@ +from typing import List + +import numpy as np +from PIL import Image +from PIL.Image import Image as PILImage +from scipy.special import log_softmax + +from .session_base import BaseSession + +pallete1 = [ + 0, + 0, + 0, + 255, + 255, + 255, + 0, + 0, + 0, + 0, + 0, + 0, +] + +pallete2 = [ + 0, + 0, + 0, + 0, + 0, + 
0, + 255, + 255, + 255, + 0, + 0, + 0, +] + +pallete3 = [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 255, + 255, + 255, +] + + +class ClothSession(BaseSession): + def predict(self, img: PILImage) -> List[PILImage]: + ort_outs = self.inner_session.run( + None, self.normalize(img, (0.5, 0.5, 0.5), (0.5, 0.5, 0.5), (768, 768)) + ) + + pred = ort_outs + pred = log_softmax(pred[0], 1) + pred = np.argmax(pred, axis=1, keepdims=True) + pred = np.squeeze(pred, 0) + pred = np.squeeze(pred, 0) + + mask = Image.fromarray(pred.astype("uint8"), mode="L") + mask = mask.resize(img.size, Image.LANCZOS) + + masks = [] + + mask1 = mask.copy() + mask1.putpalette(pallete1) + mask1 = mask1.convert("RGB").convert("L") + masks.append(mask1) + + mask2 = mask.copy() + mask2.putpalette(pallete2) + mask2 = mask2.convert("RGB").convert("L") + masks.append(mask2) + + mask3 = mask.copy() + mask3.putpalette(pallete3) + mask3 = mask3.convert("RGB").convert("L") + masks.append(mask3) + + return masks diff --git a/rembg/session_factory.py b/rembg/session_factory.py new file mode 100644 index 0000000000000000000000000000000000000000..0f96a0580e6877f449a58bb0e7b19e64c28c609b --- /dev/null +++ b/rembg/session_factory.py @@ -0,0 +1,63 @@ +import hashlib +import os +import sys +from contextlib import redirect_stdout +from pathlib import Path +from typing import Type + +import gdown +import onnxruntime as ort + +from .session_base import BaseSession +from .session_cloth import ClothSession +from .session_simple import SimpleSession + + +def new_session(model_name: str) -> BaseSession: + session_class: Type[BaseSession] + + if model_name == "u2netp": + md5 = "8e83ca70e441ab06c318d82300c84806" + url = "https://drive.google.com/uc?id=1tNuFmLv0TSNDjYIkjEdeH1IWKQdUA4HR" + session_class = SimpleSession + elif model_name == "u2net": + md5 = "60024c5c889badc19c04ad937298a77b" + url = "https://drive.google.com/uc?id=1tCU5MM1LhRgGou5OpmpjBQbSrYIUoYab" + session_class = SimpleSession + elif model_name == "u2net_human_seg": + md5 = "c09ddc2e0104f800e3e1bb4652583d1f" + url = "https://drive.google.com/uc?id=1ZfqwVxu-1XWC1xU1GHIP-FM_Knd_AX5j" + session_class = SimpleSession + elif model_name == "u2net_cloth_seg": + md5 = "2434d1f3cb744e0e49386c906e5a08bb" + url = "https://drive.google.com/uc?id=15rKbQSXQzrKCQurUjZFg8HqzZad8bcyz" + session_class = ClothSession + else: + assert AssertionError( + "Choose between u2net, u2netp, u2net_human_seg or u2net_cloth_seg" + ) + + home = os.getenv("U2NET_HOME", os.path.join("~", ".u2net")) + path = Path(home).expanduser() / f"{model_name}.onnx" + path.parents[0].mkdir(parents=True, exist_ok=True) + + if not path.exists(): + with redirect_stdout(sys.stderr): + gdown.download(url, str(path), use_cookies=False) + else: + hashing = hashlib.new("md5", path.read_bytes(), usedforsecurity=False) + if hashing.hexdigest() != md5: + with redirect_stdout(sys.stderr): + gdown.download(url, str(path), use_cookies=False) + + sess_opts = ort.SessionOptions() + + if "OMP_NUM_THREADS" in os.environ: + sess_opts.inter_op_num_threads = int(os.environ["OMP_NUM_THREADS"]) + + return session_class( + model_name, + ort.InferenceSession( + str(path), providers=ort.get_available_providers(), sess_options=sess_opts + ), + ) diff --git a/rembg/session_simple.py b/rembg/session_simple.py new file mode 100644 index 0000000000000000000000000000000000000000..7ec31813f2e14e80856803d2335671c9f50ca84f --- /dev/null +++ b/rembg/session_simple.py @@ -0,0 +1,30 @@ +from typing import List + +import numpy as np +from PIL import Image +from 
PIL.Image import Image as PILImage + +from .session_base import BaseSession + + +class SimpleSession(BaseSession): + def predict(self, img: PILImage) -> List[PILImage]: + ort_outs = self.inner_session.run( + None, + self.normalize( + img, (0.485, 0.456, 0.406), (0.229, 0.224, 0.225), (320, 320) + ), + ) + + pred = ort_outs[0][:, 0, :, :] + + ma = np.max(pred) + mi = np.min(pred) + + pred = (pred - mi) / (ma - mi) + pred = np.squeeze(pred) + + mask = Image.fromarray((pred * 255).astype("uint8"), mode="L") + mask = mask.resize(img.size, Image.LANCZOS) + + return [mask] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..edc6c83fd2f106773ce39c0e45557b4cf2fe4718 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,21 @@ +torch +torchvision +aiohttp==3.8.1 +asyncer==0.0.1 +click==8.0.3 +fastapi==0.72.0 +filetype==1.0.9 +gdown==4.4.0 +numpy==1.22.3 +onnxruntime==1.10.0 +pillow==9.0.1 +pymatting==1.1.5 +python-multipart==0.0.5 +scikit-image==0.19.1 +scipy==1.8.0 +tqdm==4.62.3 +uvicorn==0.17.0 +watchdog==2.1.7 +ninja +opencv-python +tensorboardX \ No newline at end of file diff --git a/results/test/refined_cloth/person.png b/results/test/refined_cloth/person.png new file mode 100644 index 0000000000000000000000000000000000000000..f05740e0109464a73b0c93a9c2f91e0bc52800e2 Binary files /dev/null and b/results/test/refined_cloth/person.png differ diff --git a/results/test/try-on/person.png b/results/test/try-on/person.png new file mode 100644 index 0000000000000000000000000000000000000000..5cc50a56010bb9b8e6f4948f3b42254a59a0b953 Binary files /dev/null and b/results/test/try-on/person.png differ diff --git a/results/test/warped_cloth/person.png b/results/test/warped_cloth/person.png new file mode 100644 index 0000000000000000000000000000000000000000..2f09c31308ea60154773abeeb95092d43a538365 Binary files /dev/null and b/results/test/warped_cloth/person.png differ diff --git a/saved_models/face_detection_cv2/haarcascade_frontalface_default.xml b/saved_models/face_detection_cv2/haarcascade_frontalface_default.xml new file mode 100644 index 0000000000000000000000000000000000000000..cbd1aa89e927d8d54b49fe666bf17244c3c46a7b --- /dev/null +++ b/saved_models/face_detection_cv2/haarcascade_frontalface_default.xml @@ -0,0 +1,33314 @@ + + + +BOOST + HAAR + 24 + 24 + + 211 + + 0 + 25 + + <_> + 9 + -5.0425500869750977e+00 + + <_> + + 0 -1 0 -3.1511999666690826e-02 + + 2.0875380039215088e+00 -2.2172100543975830e+00 + <_> + + 0 -1 1 1.2396000325679779e-02 + + -1.8633940219879150e+00 1.3272049427032471e+00 + <_> + + 0 -1 2 2.1927999332547188e-02 + + -1.5105249881744385e+00 1.0625729560852051e+00 + <_> + + 0 -1 3 5.7529998011887074e-03 + + -8.7463897466659546e-01 1.1760339736938477e+00 + <_> + + 0 -1 4 1.5014000236988068e-02 + + -7.7945697307586670e-01 1.2608419656753540e+00 + <_> + + 0 -1 5 9.9371001124382019e-02 + + 5.5751299858093262e-01 -1.8743000030517578e+00 + <_> + + 0 -1 6 2.7340000960975885e-03 + + -1.6911929845809937e+00 4.4009700417518616e-01 + <_> + + 0 -1 7 -1.8859000876545906e-02 + + -1.4769539833068848e+00 4.4350099563598633e-01 + <_> + + 0 -1 8 5.9739998541772366e-03 + + -8.5909199714660645e-01 8.5255599021911621e-01 + <_> + 16 + -4.9842400550842285e+00 + + <_> + + 0 -1 9 -2.1110000088810921e-02 + + 1.2435649633407593e+00 -1.5713009834289551e+00 + <_> + + 0 -1 10 2.0355999469757080e-02 + + -1.6204780340194702e+00 1.1817760467529297e+00 + <_> + + 0 -1 11 2.1308999508619308e-02 + + -1.9415930509567261e+00 7.0069098472595215e-01 + <_> + + 0 -1 
12 9.1660000383853912e-02 + + -5.5670100450515747e-01 1.7284419536590576e+00 + <_> + + 0 -1 13 3.6288000643253326e-02 + + 2.6763799786567688e-01 -2.1831810474395752e+00 + <_> + + 0 -1 14 -1.9109999760985374e-02 + + -2.6730210781097412e+00 4.5670801401138306e-01 + <_> + + 0 -1 15 8.2539999857544899e-03 + + -1.0852910280227661e+00 5.3564202785491943e-01 + <_> + + 0 -1 16 1.8355000764131546e-02 + + -3.5200199484825134e-01 9.3339198827743530e-01 + <_> + + 0 -1 17 -7.0569999516010284e-03 + + 9.2782098054885864e-01 -6.6349899768829346e-01 + <_> + + 0 -1 18 -9.8770000040531158e-03 + + 1.1577470302581787e+00 -2.9774799942970276e-01 + <_> + + 0 -1 19 1.5814000740647316e-02 + + -4.1960600018501282e-01 1.3576040267944336e+00 + <_> + + 0 -1 20 -2.0700000226497650e-02 + + 1.4590020179748535e+00 -1.9739399850368500e-01 + <_> + + 0 -1 21 -1.3760800659656525e-01 + + 1.1186759471893311e+00 -5.2915501594543457e-01 + <_> + + 0 -1 22 1.4318999834358692e-02 + + -3.5127198696136475e-01 1.1440860033035278e+00 + <_> + + 0 -1 23 1.0253000073134899e-02 + + -6.0850602388381958e-01 7.7098500728607178e-01 + <_> + + 0 -1 24 9.1508001089096069e-02 + + 3.8817799091339111e-01 -1.5122940540313721e+00 + <_> + 27 + -4.6551899909973145e+00 + + <_> + + 0 -1 25 6.9747000932693481e-02 + + -1.0130879878997803e+00 1.4687349796295166e+00 + <_> + + 0 -1 26 3.1502999365329742e-02 + + -1.6463639736175537e+00 1.0000629425048828e+00 + <_> + + 0 -1 27 1.4260999858379364e-02 + + 4.6480301022529602e-01 -1.5959889888763428e+00 + <_> + + 0 -1 28 1.4453000389039516e-02 + + -6.5511900186538696e-01 8.3021801710128784e-01 + <_> + + 0 -1 29 -3.0509999487549067e-03 + + -1.3982310295104980e+00 4.2550599575042725e-01 + <_> + + 0 -1 30 3.2722998410463333e-02 + + -5.0702601671218872e-01 1.0526109933853149e+00 + <_> + + 0 -1 31 -7.2960001416504383e-03 + + 3.6356899142265320e-01 -1.3464889526367188e+00 + <_> + + 0 -1 32 5.0425000488758087e-02 + + -3.0461400747299194e-01 1.4504129886627197e+00 + <_> + + 0 -1 33 4.6879000961780548e-02 + + -4.0286201238632202e-01 1.2145609855651855e+00 + <_> + + 0 -1 34 -6.9358997046947479e-02 + + 1.0539360046386719e+00 -4.5719701051712036e-01 + <_> + + 0 -1 35 -4.9033999443054199e-02 + + -1.6253089904785156e+00 1.5378999710083008e-01 + <_> + + 0 -1 36 8.4827996790409088e-02 + + 2.8402999043464661e-01 -1.5662059783935547e+00 + <_> + + 0 -1 37 -1.7229999648407102e-03 + + -1.0147459506988525e+00 2.3294800519943237e-01 + <_> + + 0 -1 38 1.1562199890613556e-01 + + -1.6732899844646454e-01 1.2804069519042969e+00 + <_> + + 0 -1 39 -5.1279999315738678e-02 + + 1.5162390470504761e+00 -3.0271100997924805e-01 + <_> + + 0 -1 40 -4.2706999927759171e-02 + + 1.7631920576095581e+00 -5.1832001656293869e-02 + <_> + + 0 -1 41 3.7178099155426025e-01 + + -3.1389200687408447e-01 1.5357979536056519e+00 + <_> + + 0 -1 42 1.9412999972701073e-02 + + -1.0017599910497665e-01 9.3655401468276978e-01 + <_> + + 0 -1 43 1.7439000308513641e-02 + + -4.0379899740219116e-01 9.6293002367019653e-01 + <_> + + 0 -1 44 3.9638999849557877e-02 + + 1.7039099335670471e-01 -2.9602990150451660e+00 + <_> + + 0 -1 45 -9.1469995677471161e-03 + + 8.8786798715591431e-01 -4.3818700313568115e-01 + <_> + + 0 -1 46 1.7219999572262168e-03 + + -3.7218600511550903e-01 4.0018901228904724e-01 + <_> + + 0 -1 47 3.0231000855565071e-02 + + 6.5924003720283508e-02 -2.6469180583953857e+00 + <_> + + 0 -1 48 -7.8795999288558960e-02 + + -1.7491459846496582e+00 2.8475299477577209e-01 + <_> + + 0 -1 49 2.1110000088810921e-03 + + -9.3908101320266724e-01 2.3205199837684631e-01 + <_> + + 0 -1 50 
2.7091000229120255e-02 + + -5.2664000540971756e-02 1.0756820440292358e+00 + <_> + + 0 -1 51 -4.4964998960494995e-02 + + -1.8294479846954346e+00 9.9561996757984161e-02 + <_> + 32 + -4.4531588554382324e+00 + + <_> + + 0 -1 52 -6.5701000392436981e-02 + + 1.1558510065078735e+00 -1.0716359615325928e+00 + <_> + + 0 -1 53 1.5839999541640282e-02 + + -1.5634720325469971e+00 7.6877099275588989e-01 + <_> + + 0 -1 54 1.4570899307727814e-01 + + -5.7450097799301147e-01 1.3808720111846924e+00 + <_> + + 0 -1 55 6.1389999464154243e-03 + + -1.4570560455322266e+00 5.1610302925109863e-01 + <_> + + 0 -1 56 6.7179999314248562e-03 + + -8.3533602952957153e-01 5.8522200584411621e-01 + <_> + + 0 -1 57 1.8518000841140747e-02 + + -3.1312099099159241e-01 1.1696679592132568e+00 + <_> + + 0 -1 58 1.9958000630140305e-02 + + -4.3442600965499878e-01 9.5446902513504028e-01 + <_> + + 0 -1 59 -2.7755001187324524e-01 + + 1.4906179904937744e+00 -1.3815900683403015e-01 + <_> + + 0 -1 60 9.1859996318817139e-03 + + -9.6361500024795532e-01 2.7665498852729797e-01 + <_> + + 0 -1 61 -3.7737999111413956e-02 + + -2.4464108943939209e+00 2.3619599640369415e-01 + <_> + + 0 -1 62 1.8463000655174255e-02 + + 1.7539200186729431e-01 -1.3423130512237549e+00 + <_> + + 0 -1 63 -1.1114999651908875e-02 + + 4.8710799217224121e-01 -8.9851897954940796e-01 + <_> + + 0 -1 64 3.3927999436855316e-02 + + 1.7874200642108917e-01 -1.6342279911041260e+00 + <_> + + 0 -1 65 -3.5649001598358154e-02 + + -1.9607399702072144e+00 1.8102499842643738e-01 + <_> + + 0 -1 66 -1.1438000015914440e-02 + + 9.9010699987411499e-01 -3.8103199005126953e-01 + <_> + + 0 -1 67 -6.5236002206802368e-02 + + -2.5794160366058350e+00 2.4753600358963013e-01 + <_> + + 0 -1 68 -4.2272001504898071e-02 + + 1.4411840438842773e+00 -2.9508298635482788e-01 + <_> + + 0 -1 69 1.9219999667257071e-03 + + -4.9608600139617920e-01 6.3173598051071167e-01 + <_> + + 0 -1 70 -1.2921799719333649e-01 + + -2.3314270973205566e+00 5.4496999830007553e-02 + <_> + + 0 -1 71 2.2931000217795372e-02 + + -8.4447097778320312e-01 3.8738098740577698e-01 + <_> + + 0 -1 72 -3.4120000898838043e-02 + + -1.4431500434875488e+00 9.8422996699810028e-02 + <_> + + 0 -1 73 2.6223000138998032e-02 + + 1.8223099410533905e-01 -1.2586519718170166e+00 + <_> + + 0 -1 74 2.2236999124288559e-02 + + 6.9807998836040497e-02 -2.3820950984954834e+00 + <_> + + 0 -1 75 -5.8240001089870930e-03 + + 3.9332500100135803e-01 -2.7542799711227417e-01 + <_> + + 0 -1 76 4.3653000146150589e-02 + + 1.4832699298858643e-01 -1.1368780136108398e+00 + <_> + + 0 -1 77 5.7266999036073685e-02 + + 2.4628099799156189e-01 -1.2687400579452515e+00 + <_> + + 0 -1 78 2.3409998975694180e-03 + + -7.5448900461196899e-01 2.7163800597190857e-01 + <_> + + 0 -1 79 1.2996000237762928e-02 + + -3.6394900083541870e-01 7.0959198474884033e-01 + <_> + + 0 -1 80 -2.6517000049352646e-02 + + -2.3221859931945801e+00 3.5744000226259232e-02 + <_> + + 0 -1 81 -5.8400002308189869e-03 + + 4.2194300889968872e-01 -4.8184998333454132e-02 + <_> + + 0 -1 82 -1.6568999737501144e-02 + + 1.1099940538406372e+00 -3.4849700331687927e-01 + <_> + + 0 -1 83 -6.8157002329826355e-02 + + -3.3269989490509033e+00 2.1299000084400177e-01 + <_> + 52 + -4.3864588737487793e+00 + + <_> + + 0 -1 84 3.9974000304937363e-02 + + -1.2173449993133545e+00 1.0826710462570190e+00 + <_> + + 0 -1 85 1.8819500505924225e-01 + + -4.8289400339126587e-01 1.4045250415802002e+00 + <_> + + 0 -1 86 7.8027002513408661e-02 + + -1.0782150030136108e+00 7.4040299654006958e-01 + <_> + + 0 -1 87 1.1899999663000926e-04 + + -1.2019979953765869e+00 
3.7749201059341431e-01 + <_> + + 0 -1 88 8.5056997835636139e-02 + + -4.3939098715782166e-01 1.2647340297698975e+00 + <_> + + 0 -1 89 8.9720003306865692e-03 + + -1.8440499901771545e-01 4.5726400613784790e-01 + <_> + + 0 -1 90 8.8120000436902046e-03 + + 3.0396699905395508e-01 -9.5991098880767822e-01 + <_> + + 0 -1 91 -2.3507999256253242e-02 + + 1.2487529516220093e+00 4.6227999031543732e-02 + <_> + + 0 -1 92 7.0039997808635235e-03 + + -5.9442102909088135e-01 5.3963297605514526e-01 + <_> + + 0 -1 93 3.3851999789476395e-02 + + 2.8496098518371582e-01 -1.4895249605178833e+00 + <_> + + 0 -1 94 -3.2530000898987055e-03 + + 4.8120799660682678e-01 -5.2712398767471313e-01 + <_> + + 0 -1 95 2.9097000136971474e-02 + + 2.6743900775909424e-01 -1.6007850170135498e+00 + <_> + + 0 -1 96 -8.4790000692009926e-03 + + -1.3107639551162720e+00 1.5243099629878998e-01 + <_> + + 0 -1 97 -1.0795000009238720e-02 + + 4.5613598823547363e-01 -7.2050899267196655e-01 + <_> + + 0 -1 98 -2.4620000272989273e-02 + + -1.7320619821548462e+00 6.8363003432750702e-02 + <_> + + 0 -1 99 3.7380000576376915e-03 + + -1.9303299486637115e-01 6.8243497610092163e-01 + <_> + + 0 -1 100 -1.2264000251889229e-02 + + -1.6095290184020996e+00 7.5268000364303589e-02 + <_> + + 0 -1 101 -4.8670000396668911e-03 + + 7.4286502599716187e-01 -2.1510200202465057e-01 + <_> + + 0 -1 102 7.6725997030735016e-02 + + -2.6835098862648010e-01 1.3094140291213989e+00 + <_> + + 0 -1 103 2.8578000143170357e-02 + + -5.8793000876903534e-02 1.2196329832077026e+00 + <_> + + 0 -1 104 1.9694000482559204e-02 + + -3.5142898559570312e-01 8.4926998615264893e-01 + <_> + + 0 -1 105 -2.9093999415636063e-02 + + -1.0507299900054932e+00 2.9806300997734070e-01 + <_> + + 0 -1 106 -2.9144000262022018e-02 + + 8.2547801733016968e-01 -3.2687199115753174e-01 + <_> + + 0 -1 107 1.9741000607609749e-02 + + 2.0452600717544556e-01 -8.3760201930999756e-01 + <_> + + 0 -1 108 4.3299999088048935e-03 + + 2.0577900111675262e-01 -6.6829800605773926e-01 + <_> + + 0 -1 109 -3.5500999540090561e-02 + + -1.2969900369644165e+00 1.3897499442100525e-01 + <_> + + 0 -1 110 -1.6172999516129494e-02 + + -1.3110569715499878e+00 7.5751997530460358e-02 + <_> + + 0 -1 111 -2.2151000797748566e-02 + + -1.0524389743804932e+00 1.9241100549697876e-01 + <_> + + 0 -1 112 -2.2707000374794006e-02 + + -1.3735309839248657e+00 6.6780999302864075e-02 + <_> + + 0 -1 113 1.6607999801635742e-02 + + -3.7135999649763107e-02 7.7846401929855347e-01 + <_> + + 0 -1 114 -1.3309000059962273e-02 + + -9.9850702285766602e-01 1.2248100340366364e-01 + <_> + + 0 -1 115 -3.3732000738382339e-02 + + 1.4461359977722168e+00 1.3151999562978745e-02 + <_> + + 0 -1 116 1.6935000196099281e-02 + + -3.7121298909187317e-01 5.2842199802398682e-01 + <_> + + 0 -1 117 3.3259999472647905e-03 + + -5.7568502426147461e-01 3.9261901378631592e-01 + <_> + + 0 -1 118 8.3644002676010132e-02 + + 1.6116000711917877e-02 -2.1173279285430908e+00 + <_> + + 0 -1 119 2.5785198807716370e-01 + + -8.1609003245830536e-02 9.8782497644424438e-01 + <_> + + 0 -1 120 -3.6566998809576035e-02 + + -1.1512110233306885e+00 9.6459001302719116e-02 + <_> + + 0 -1 121 -1.6445999965071678e-02 + + 3.7315499782562256e-01 -1.4585399627685547e-01 + <_> + + 0 -1 122 -3.7519999314099550e-03 + + 2.6179298758506775e-01 -5.8156698942184448e-01 + <_> + + 0 -1 123 -6.3660000450909138e-03 + + 7.5477397441864014e-01 -1.7055200040340424e-01 + <_> + + 0 -1 124 -3.8499999791383743e-03 + + 2.2653999924659729e-01 -6.3876402378082275e-01 + <_> + + 0 -1 125 -4.5494001358747482e-02 + + -1.2640299797058105e+00 
2.5260698795318604e-01 + <_> + + 0 -1 126 -2.3941000923514366e-02 + + 8.7068402767181396e-01 -2.7104699611663818e-01 + <_> + + 0 -1 127 -7.7558003365993500e-02 + + -1.3901610374450684e+00 2.3612299561500549e-01 + <_> + + 0 -1 128 2.3614000529050827e-02 + + 6.6140003502368927e-02 -1.2645419836044312e+00 + <_> + + 0 -1 129 -2.5750000495463610e-03 + + -5.3841698169708252e-01 3.0379098653793335e-01 + <_> + + 0 -1 130 1.2010800093412399e-01 + + -3.5343000292778015e-01 5.2866202592849731e-01 + <_> + + 0 -1 131 2.2899999748915434e-03 + + -5.8701997995376587e-01 2.4061000347137451e-01 + <_> + + 0 -1 132 6.9716997444629669e-02 + + -3.3348900079727173e-01 5.1916301250457764e-01 + <_> + + 0 -1 133 -4.6670001000165939e-02 + + 6.9795399904251099e-01 -1.4895999804139137e-02 + <_> + + 0 -1 134 -5.0129000097513199e-02 + + 8.6146199703216553e-01 -2.5986000895500183e-01 + <_> + + 0 -1 135 3.0147999525070190e-02 + + 1.9332799315452576e-01 -5.9131097793579102e-01 + <_> + 53 + -4.1299300193786621e+00 + + <_> + + 0 -1 136 9.1085001826286316e-02 + + -8.9233100414276123e-01 1.0434230566024780e+00 + <_> + + 0 -1 137 1.2818999588489532e-02 + + -1.2597670555114746e+00 5.5317097902297974e-01 + <_> + + 0 -1 138 1.5931999310851097e-02 + + -8.6254400014877319e-01 6.3731801509857178e-01 + <_> + + 0 -1 139 2.2780001163482666e-03 + + -7.4639201164245605e-01 5.3155601024627686e-01 + <_> + + 0 -1 140 3.1840998679399490e-02 + + -1.2650489807128906e+00 3.6153900623321533e-01 + <_> + + 0 -1 141 2.6960000395774841e-03 + + -9.8290401697158813e-01 3.6013001203536987e-01 + <_> + + 0 -1 142 -1.2055000290274620e-02 + + 6.4068400859832764e-01 -5.0125002861022949e-01 + <_> + + 0 -1 143 2.1324999630451202e-02 + + -2.4034999310970306e-01 8.5448002815246582e-01 + <_> + + 0 -1 144 3.0486000701785088e-02 + + -3.4273600578308105e-01 1.1428849697113037e+00 + <_> + + 0 -1 145 -4.5079998672008514e-02 + + 1.0976949930191040e+00 -1.7974600195884705e-01 + <_> + + 0 -1 146 -7.1700997650623322e-02 + + 1.5735000371932983e+00 -3.1433498859405518e-01 + <_> + + 0 -1 147 5.9218000620603561e-02 + + -2.7582401037216187e-01 1.0448570251464844e+00 + <_> + + 0 -1 148 6.7010000348091125e-03 + + -1.0974019765853882e+00 1.9801199436187744e-01 + <_> + + 0 -1 149 4.1046999394893646e-02 + + 3.0547699332237244e-01 -1.3287999629974365e+00 + <_> + + 0 -1 150 -8.5499999113380909e-04 + + 2.5807100534439087e-01 -7.0052897930145264e-01 + <_> + + 0 -1 151 -3.0360000208020210e-02 + + -1.2306419610977173e+00 2.2609399259090424e-01 + <_> + + 0 -1 152 -1.2930000200867653e-02 + + 4.0758600831031799e-01 -5.1234501600265503e-01 + <_> + + 0 -1 153 3.7367999553680420e-02 + + -9.4755001366138458e-02 6.1765098571777344e-01 + <_> + + 0 -1 154 2.4434000253677368e-02 + + -4.1100600361824036e-01 4.7630500793457031e-01 + <_> + + 0 -1 155 5.7007998228073120e-02 + + 2.5249299407005310e-01 -6.8669801950454712e-01 + <_> + + 0 -1 156 -1.6313999891281128e-02 + + -9.3928402662277222e-01 1.1448100209236145e-01 + <_> + + 0 -1 157 -1.7648899555206299e-01 + + 1.2451089620590210e+00 -5.6519001722335815e-02 + <_> + + 0 -1 158 1.7614600062370300e-01 + + -3.2528200745582581e-01 8.2791501283645630e-01 + <_> + + 0 -1 159 -7.3910001665353775e-03 + + 3.4783700108528137e-01 -1.7929099500179291e-01 + <_> + + 0 -1 160 6.0890998691320419e-02 + + 5.5098000913858414e-02 -1.5480779409408569e+00 + <_> + + 0 -1 161 -2.9123000800609589e-02 + + -1.0255639553070068e+00 2.4106900393962860e-01 + <_> + + 0 -1 162 -4.5648999512195587e-02 + + 1.0301599502563477e+00 -3.1672099232673645e-01 + <_> + + 0 -1 163 
3.7333000451326370e-02 + + 2.1620599925518036e-01 -8.2589900493621826e-01 + <_> + + 0 -1 164 -2.4411000311374664e-02 + + -1.5957959890365601e+00 5.1139000803232193e-02 + <_> + + 0 -1 165 -5.9806998819112778e-02 + + -1.0312290191650391e+00 1.3092300295829773e-01 + <_> + + 0 -1 166 -3.0106000602245331e-02 + + -1.4781630039215088e+00 3.7211999297142029e-02 + <_> + + 0 -1 167 7.4209999293088913e-03 + + -2.4024100601673126e-01 4.9333998560905457e-01 + <_> + + 0 -1 168 -2.1909999195486307e-03 + + 2.8941500186920166e-01 -5.7259601354598999e-01 + <_> + + 0 -1 169 2.0860999822616577e-02 + + -2.3148399591445923e-01 6.3765901327133179e-01 + <_> + + 0 -1 170 -6.6990000195801258e-03 + + -1.2107750177383423e+00 6.4018003642559052e-02 + <_> + + 0 -1 171 1.8758000805974007e-02 + + 2.4461300671100616e-01 -9.9786698818206787e-01 + <_> + + 0 -1 172 -4.4323001056909561e-02 + + -1.3699189424514771e+00 3.6051999777555466e-02 + <_> + + 0 -1 173 2.2859999909996986e-02 + + 2.1288399398326874e-01 -1.0397620201110840e+00 + <_> + + 0 -1 174 -9.8600005730986595e-04 + + 3.2443600893020630e-01 -5.4291802644729614e-01 + <_> + + 0 -1 175 1.7239000648260117e-02 + + -2.8323900699615479e-01 4.4468200206756592e-01 + <_> + + 0 -1 176 -3.4531001001596451e-02 + + -2.3107020854949951e+00 -3.1399999279528856e-03 + <_> + + 0 -1 177 6.7006997764110565e-02 + + 2.8715699911117554e-01 -6.4481002092361450e-01 + <_> + + 0 -1 178 2.3776899278163910e-01 + + -2.7174800634384155e-01 8.0219101905822754e-01 + <_> + + 0 -1 179 -1.2903000228106976e-02 + + -1.5317620038986206e+00 2.1423600614070892e-01 + <_> + + 0 -1 180 1.0514999739825726e-02 + + 7.7037997543811798e-02 -1.0581140518188477e+00 + <_> + + 0 -1 181 1.6969000920653343e-02 + + 1.4306700229644775e-01 -8.5828399658203125e-01 + <_> + + 0 -1 182 -7.2460002265870571e-03 + + -1.1020129919052124e+00 6.4906999468803406e-02 + <_> + + 0 -1 183 1.0556999593973160e-02 + + 1.3964000158011913e-02 6.3601499795913696e-01 + <_> + + 0 -1 184 6.1380001716315746e-03 + + -3.4545901417732239e-01 5.6296801567077637e-01 + <_> + + 0 -1 185 1.3158000074326992e-02 + + 1.9927300512790680e-01 -1.5040320158004761e+00 + <_> + + 0 -1 186 3.1310000922530890e-03 + + -4.0903699398040771e-01 3.7796398997306824e-01 + <_> + + 0 -1 187 -1.0920699685811996e-01 + + -2.2227079868316650e+00 1.2178199738264084e-01 + <_> + + 0 -1 188 8.1820003688335419e-03 + + -2.8652000427246094e-01 6.7890799045562744e-01 + <_> + 62 + -4.0218091011047363e+00 + + <_> + + 0 -1 189 3.1346999108791351e-02 + + -8.8884598016738892e-01 9.4936800003051758e-01 + <_> + + 0 -1 190 3.1918000429868698e-02 + + -1.1146880388259888e+00 4.8888999223709106e-01 + <_> + + 0 -1 191 6.5939999185502529e-03 + + -1.0097689628601074e+00 4.9723801016807556e-01 + <_> + + 0 -1 192 2.6148000732064247e-02 + + 2.5991299748420715e-01 -1.2537480592727661e+00 + <_> + + 0 -1 193 1.2845000252127647e-02 + + -5.7138597965240479e-01 5.9659498929977417e-01 + <_> + + 0 -1 194 2.6344999670982361e-02 + + -5.5203199386596680e-01 3.0217400193214417e-01 + <_> + + 0 -1 195 -1.5083000063896179e-02 + + -1.2871240377426147e+00 2.2354200482368469e-01 + <_> + + 0 -1 196 -3.8887001574039459e-02 + + 1.7425049543380737e+00 -9.9747002124786377e-02 + <_> + + 0 -1 197 -5.7029998861253262e-03 + + -1.0523240566253662e+00 1.8362599611282349e-01 + <_> + + 0 -1 198 -1.4860000228509307e-03 + + 5.6784200668334961e-01 -4.6742001175880432e-01 + <_> + + 0 -1 199 -2.8486000373959541e-02 + + 1.3082909584045410e+00 -2.6460900902748108e-01 + <_> + + 0 -1 200 6.6224999725818634e-02 + + -4.6210700273513794e-01 
4.1749599575996399e-01 + <_> + + 0 -1 201 8.8569996878504753e-03 + + -4.1474899649620056e-01 5.9204798936843872e-01 + <_> + + 0 -1 202 1.1355999857187271e-02 + + 3.6103099584579468e-01 -4.5781201124191284e-01 + <_> + + 0 -1 203 -2.7679998893290758e-03 + + -8.9238899946212769e-01 1.4199000597000122e-01 + <_> + + 0 -1 204 1.1246999725699425e-02 + + 2.9353401064872742e-01 -9.7330600023269653e-01 + <_> + + 0 -1 205 7.1970000863075256e-03 + + -7.9334902763366699e-01 1.8313400447368622e-01 + <_> + + 0 -1 206 3.1768999993801117e-02 + + 1.5523099899291992e-01 -1.3245639801025391e+00 + <_> + + 0 -1 207 2.5173999369144440e-02 + + 3.4214999526739120e-02 -2.0948131084442139e+00 + <_> + + 0 -1 208 7.5360001064836979e-03 + + -3.9450600743293762e-01 5.1333999633789062e-01 + <_> + + 0 -1 209 3.2873000949621201e-02 + + 8.8372997939586639e-02 -1.2814120054244995e+00 + <_> + + 0 -1 210 -2.7379998937249184e-03 + + 5.5286502838134766e-01 -4.6384999155998230e-01 + <_> + + 0 -1 211 -3.8075000047683716e-02 + + -1.8497270345687866e+00 4.5944001525640488e-02 + <_> + + 0 -1 212 -3.8984000682830811e-02 + + -4.8223701119422913e-01 3.4760600328445435e-01 + <_> + + 0 -1 213 2.8029999230057001e-03 + + -4.5154699683189392e-01 4.2806300520896912e-01 + <_> + + 0 -1 214 -5.4145999252796173e-02 + + -8.4520798921585083e-01 1.6674900054931641e-01 + <_> + + 0 -1 215 -8.3280000835657120e-03 + + 3.5348299145698547e-01 -4.7163200378417969e-01 + <_> + + 0 -1 216 3.3778000622987747e-02 + + 1.8463100492954254e-01 -1.6686669588088989e+00 + <_> + + 0 -1 217 -1.1238099634647369e-01 + + -1.2521569728851318e+00 3.5992000252008438e-02 + <_> + + 0 -1 218 -1.0408000089228153e-02 + + -8.1620401144027710e-01 2.3428599536418915e-01 + <_> + + 0 -1 219 -4.9439999274909496e-03 + + -9.2584699392318726e-01 1.0034800320863724e-01 + <_> + + 0 -1 220 -9.3029998242855072e-03 + + 5.6499302387237549e-01 -1.8881900608539581e-01 + <_> + + 0 -1 221 -1.1749999597668648e-02 + + 8.0302399396896362e-01 -3.8277000188827515e-01 + <_> + + 0 -1 222 -2.3217000067234039e-02 + + -8.4926998615264893e-01 1.9671200215816498e-01 + <_> + + 0 -1 223 1.6866000369191170e-02 + + -4.0591898560523987e-01 5.0695300102233887e-01 + <_> + + 0 -1 224 -2.4031000211834908e-02 + + -1.5297520160675049e+00 2.3344999551773071e-01 + <_> + + 0 -1 225 -3.6945998668670654e-02 + + 6.3007700443267822e-01 -3.1780400872230530e-01 + <_> + + 0 -1 226 -6.1563998460769653e-02 + + 5.8627897500991821e-01 -1.2107999995350838e-02 + <_> + + 0 -1 227 2.1661000326275826e-02 + + -2.5623700022697449e-01 1.0409849882125854e+00 + <_> + + 0 -1 228 -3.6710000131279230e-03 + + 2.9171100258827209e-01 -8.3287298679351807e-01 + <_> + + 0 -1 229 4.4849000871181488e-02 + + -3.9633199572563171e-01 4.5662000775337219e-01 + <_> + + 0 -1 230 5.7195000350475311e-02 + + 2.1023899316787720e-01 -1.5004800558090210e+00 + <_> + + 0 -1 231 -1.1342000216245651e-02 + + 4.4071298837661743e-01 -3.8653799891471863e-01 + <_> + + 0 -1 232 -1.2004000134766102e-02 + + 9.3954598903656006e-01 -1.0589499771595001e-01 + <_> + + 0 -1 233 2.2515999153256416e-02 + + 9.4480002298951149e-03 -1.6799509525299072e+00 + <_> + + 0 -1 234 -1.9809000194072723e-02 + + -1.0133639574050903e+00 2.4146600067615509e-01 + <_> + + 0 -1 235 1.5891000628471375e-02 + + -3.7507599592208862e-01 4.6614098548889160e-01 + <_> + + 0 -1 236 -9.1420002281665802e-03 + + -8.0484098196029663e-01 1.7816999554634094e-01 + <_> + + 0 -1 237 -4.4740000739693642e-03 + + -1.0562069416046143e+00 7.3305003345012665e-02 + <_> + + 0 -1 238 1.2742500007152557e-01 + + 2.0165599882602692e-01 
-1.5467929840087891e+00 + <_> + + 0 -1 239 4.7703001648187637e-02 + + -3.7937799096107483e-01 3.7885999679565430e-01 + <_> + + 0 -1 240 5.3608000278472900e-02 + + 2.1220499277114868e-01 -1.2399710416793823e+00 + <_> + + 0 -1 241 -3.9680998772382736e-02 + + -1.0257550477981567e+00 5.1282998174428940e-02 + <_> + + 0 -1 242 -6.7327000200748444e-02 + + -1.0304750204086304e+00 2.3005299270153046e-01 + <_> + + 0 -1 243 1.3337600231170654e-01 + + -2.0869000256061554e-01 1.2272510528564453e+00 + <_> + + 0 -1 244 -2.0919300615787506e-01 + + 8.7929898500442505e-01 -4.4254999607801437e-02 + <_> + + 0 -1 245 -6.5589003264904022e-02 + + 1.0443429946899414e+00 -2.1682099997997284e-01 + <_> + + 0 -1 246 6.1882998794317245e-02 + + 1.3798199594020844e-01 -1.9009059667587280e+00 + <_> + + 0 -1 247 -2.5578999891877174e-02 + + -1.6607600450515747e+00 5.8439997956156731e-03 + <_> + + 0 -1 248 -3.4827001392841339e-02 + + 7.9940402507781982e-01 -8.2406997680664062e-02 + <_> + + 0 -1 249 -1.8209999427199364e-02 + + -9.6073997020721436e-01 6.6320002079010010e-02 + <_> + + 0 -1 250 1.5070999972522259e-02 + + 1.9899399578571320e-01 -7.6433002948760986e-01 + <_> + 72 + -3.8832089900970459e+00 + + <_> + + 0 -1 251 4.6324998140335083e-02 + + -1.0362670421600342e+00 8.2201498746871948e-01 + <_> + + 0 -1 252 1.5406999737024307e-02 + + -1.2327589988708496e+00 2.9647698998451233e-01 + <_> + + 0 -1 253 1.2808999978005886e-02 + + -7.5852298736572266e-01 5.7985502481460571e-01 + <_> + + 0 -1 254 4.9150999635457993e-02 + + -3.8983899354934692e-01 8.9680302143096924e-01 + <_> + + 0 -1 255 1.2621000409126282e-02 + + -7.1799302101135254e-01 5.0440901517868042e-01 + <_> + + 0 -1 256 -1.8768999725580215e-02 + + 5.5147600173950195e-01 -7.0555400848388672e-01 + <_> + + 0 -1 257 4.1965000331401825e-02 + + -4.4782099127769470e-01 7.0985502004623413e-01 + <_> + + 0 -1 258 -5.1401998847723007e-02 + + -1.0932120084762573e+00 2.6701900362968445e-01 + <_> + + 0 -1 259 -7.0960998535156250e-02 + + 8.3618402481079102e-01 -3.8318100571632385e-01 + <_> + + 0 -1 260 1.6745999455451965e-02 + + -2.5733101367950439e-01 2.5966501235961914e-01 + <_> + + 0 -1 261 -6.2400000169873238e-03 + + 3.1631499528884888e-01 -5.8796900510787964e-01 + <_> + + 0 -1 262 -3.9397999644279480e-02 + + -1.0491210222244263e+00 1.6822400689125061e-01 + <_> + + 0 -1 263 0. 
+ + 1.6144199669361115e-01 -8.7876898050308228e-01 + <_> + + 0 -1 264 -2.2307999432086945e-02 + + -6.9053500890731812e-01 2.3607000708580017e-01 + <_> + + 0 -1 265 1.8919999711215496e-03 + + 2.4989199638366699e-01 -5.6583297252655029e-01 + <_> + + 0 -1 266 1.0730000212788582e-03 + + -5.0415802001953125e-01 3.8374501466751099e-01 + <_> + + 0 -1 267 3.9230998605489731e-02 + + 4.2619001120328903e-02 -1.3875889778137207e+00 + <_> + + 0 -1 268 6.2238000333309174e-02 + + 1.4119400084018707e-01 -1.0688860416412354e+00 + <_> + + 0 -1 269 2.1399999968707561e-03 + + -8.9622402191162109e-01 1.9796399772167206e-01 + <_> + + 0 -1 270 9.1800000518560410e-04 + + -4.5337298512458801e-01 4.3532699346542358e-01 + <_> + + 0 -1 271 -6.9169998168945312e-03 + + 3.3822798728942871e-01 -4.4793000817298889e-01 + <_> + + 0 -1 272 -2.3866999894380569e-02 + + -7.8908598423004150e-01 2.2511799633502960e-01 + <_> + + 0 -1 273 -1.0262800008058548e-01 + + -2.2831439971923828e+00 -5.3960001096129417e-03 + <_> + + 0 -1 274 -9.5239998772740364e-03 + + 3.9346700906753540e-01 -5.2242201566696167e-01 + <_> + + 0 -1 275 3.9877001196146011e-02 + + 3.2799001783132553e-02 -1.5079489946365356e+00 + <_> + + 0 -1 276 -1.3144999742507935e-02 + + -1.0839990377426147e+00 1.8482400476932526e-01 + <_> + + 0 -1 277 -5.0590999424457550e-02 + + -1.8822289705276489e+00 -2.2199999075382948e-03 + <_> + + 0 -1 278 2.4917000904679298e-02 + + 1.4593400061130524e-01 -2.2196519374847412e+00 + <_> + + 0 -1 279 -7.6370001770555973e-03 + + -1.0164569616317749e+00 5.8797001838684082e-02 + <_> + + 0 -1 280 4.2911998927593231e-02 + + 1.5443000197410583e-01 -1.1843889951705933e+00 + <_> + + 0 -1 281 2.3000000510364771e-04 + + -7.7305799722671509e-01 1.2189900130033493e-01 + <_> + + 0 -1 282 9.0929996222257614e-03 + + -1.1450099945068359e-01 7.1091300249099731e-01 + <_> + + 0 -1 283 1.1145000346004963e-02 + + 7.0000998675823212e-02 -1.0534820556640625e+00 + <_> + + 0 -1 284 -5.2453000098466873e-02 + + -1.7594360113143921e+00 1.9523799419403076e-01 + <_> + + 0 -1 285 -2.3020699620246887e-01 + + 9.5840299129486084e-01 -2.5045698881149292e-01 + <_> + + 0 -1 286 -1.6365999355912209e-02 + + 4.6731901168823242e-01 -2.1108399331569672e-01 + <_> + + 0 -1 287 -1.7208000645041466e-02 + + 7.0835697650909424e-01 -2.8018298745155334e-01 + <_> + + 0 -1 288 -3.6648001521825790e-02 + + -1.1013339757919312e+00 2.4341100454330444e-01 + <_> + + 0 -1 289 -1.0304999537765980e-02 + + -1.0933129787445068e+00 5.6258998811244965e-02 + <_> + + 0 -1 290 -1.3713000342249870e-02 + + -2.6438099145889282e-01 1.9821000099182129e-01 + <_> + + 0 -1 291 2.9308000579476357e-02 + + -2.2142399847507477e-01 1.0525950193405151e+00 + <_> + + 0 -1 292 2.4077000096440315e-02 + + 1.8485699594020844e-01 -1.7203969955444336e+00 + <_> + + 0 -1 293 6.1280000954866409e-03 + + -9.2721498012542725e-01 5.8752998709678650e-02 + <_> + + 0 -1 294 -2.2377999499440193e-02 + + 1.9646559953689575e+00 2.7785999700427055e-02 + <_> + + 0 -1 295 -7.0440000854432583e-03 + + 2.1427600085735321e-01 -4.8407599329948425e-01 + <_> + + 0 -1 296 -4.0603000670671463e-02 + + -1.1754349470138550e+00 1.6061200201511383e-01 + <_> + + 0 -1 297 -2.4466000497341156e-02 + + -1.1239900588989258e+00 4.1110001504421234e-02 + <_> + + 0 -1 298 2.5309999473392963e-03 + + -1.7169700562953949e-01 3.2178801298141479e-01 + <_> + + 0 -1 299 -1.9588999450206757e-02 + + 8.2720202207565308e-01 -2.6376700401306152e-01 + <_> + + 0 -1 300 -2.9635999351739883e-02 + + -1.1524770259857178e+00 1.4999300241470337e-01 + <_> + + 0 -1 301 
-1.5030000358819962e-02 + + -1.0491830110549927e+00 4.0160998702049255e-02 + <_> + + 0 -1 302 -6.0715001076459885e-02 + + -1.0903840065002441e+00 1.5330800414085388e-01 + <_> + + 0 -1 303 -1.2790000066161156e-02 + + 4.2248600721359253e-01 -4.2399200797080994e-01 + <_> + + 0 -1 304 -2.0247999578714371e-02 + + -9.1866999864578247e-01 1.8485699594020844e-01 + <_> + + 0 -1 305 -3.0683999881148338e-02 + + -1.5958670377731323e+00 2.5760000571608543e-03 + <_> + + 0 -1 306 -2.0718000829219818e-02 + + -6.6299998760223389e-01 3.1037199497222900e-01 + <_> + + 0 -1 307 -1.7290000105276704e-03 + + 1.9183400273323059e-01 -6.5084999799728394e-01 + <_> + + 0 -1 308 -3.1394001096487045e-02 + + -6.3643002510070801e-01 1.5408399701118469e-01 + <_> + + 0 -1 309 1.9003000110387802e-02 + + -1.8919399380683899e-01 1.5294510126113892e+00 + <_> + + 0 -1 310 6.1769997701048851e-03 + + -1.0597900301218033e-01 6.4859598875045776e-01 + <_> + + 0 -1 311 -1.0165999643504620e-02 + + -1.0802700519561768e+00 3.7176001816987991e-02 + <_> + + 0 -1 312 -1.4169999631121755e-03 + + 3.4157499670982361e-01 -9.7737997770309448e-02 + <_> + + 0 -1 313 -4.0799998678267002e-03 + + 4.7624599933624268e-01 -3.4366300702095032e-01 + <_> + + 0 -1 314 -4.4096998870372772e-02 + + 9.7634297609329224e-01 -1.9173000007867813e-02 + <_> + + 0 -1 315 -6.0669999569654465e-02 + + -2.1752851009368896e+00 -2.8925999999046326e-02 + <_> + + 0 -1 316 -3.2931998372077942e-02 + + -6.4383101463317871e-01 1.6494099795818329e-01 + <_> + + 0 -1 317 -1.4722800254821777e-01 + + -1.4745830297470093e+00 2.5839998852461576e-03 + <_> + + 0 -1 318 -1.1930000036954880e-02 + + 4.2441400885581970e-01 -1.7712600529193878e-01 + <_> + + 0 -1 319 1.4517900347709656e-01 + + 2.5444999337196350e-02 -1.2779400348663330e+00 + <_> + + 0 -1 320 5.1447998732328415e-02 + + 1.5678399801254272e-01 -1.5188430547714233e+00 + <_> + + 0 -1 321 3.1479999888688326e-03 + + -4.0424400568008423e-01 3.2429701089859009e-01 + <_> + + 0 -1 322 -4.3600000441074371e-02 + + -1.9932260513305664e+00 1.5018600225448608e-01 + <_> + 83 + -3.8424909114837646e+00 + + <_> + + 0 -1 323 1.2899599969387054e-01 + + -6.2161999940872192e-01 1.1116520166397095e+00 + <_> + + 0 -1 324 -9.1261997818946838e-02 + + 1.0143059492111206e+00 -6.1335200071334839e-01 + <_> + + 0 -1 325 1.4271999709308147e-02 + + -1.0261659622192383e+00 3.9779999852180481e-01 + <_> + + 0 -1 326 3.2889999449253082e-02 + + -1.1386079788208008e+00 2.8690800070762634e-01 + <_> + + 0 -1 327 1.2590000405907631e-02 + + -5.6645601987838745e-01 4.5172399282455444e-01 + <_> + + 0 -1 328 1.4661000110208988e-02 + + 3.0505999922752380e-01 -6.8129599094390869e-01 + <_> + + 0 -1 329 -3.3555999398231506e-02 + + -1.7208939790725708e+00 6.1439000070095062e-02 + <_> + + 0 -1 330 1.4252699911594391e-01 + + 2.3192200064659119e-01 -1.7297149896621704e+00 + <_> + + 0 -1 331 -6.2079997733235359e-03 + + -1.2163300514221191e+00 1.2160199880599976e-01 + <_> + + 0 -1 332 1.8178999423980713e-02 + + 3.2553699612617493e-01 -8.1003999710083008e-01 + <_> + + 0 -1 333 2.5036999955773354e-02 + + -3.1698799133300781e-01 6.7361402511596680e-01 + <_> + + 0 -1 334 4.6560999006032944e-02 + + -1.1089800298213959e-01 8.4082502126693726e-01 + <_> + + 0 -1 335 -8.9999996125698090e-03 + + 3.9574500918388367e-01 -4.7624599933624268e-01 + <_> + + 0 -1 336 4.0805999189615250e-02 + + -1.8000000272877514e-04 9.4570702314376831e-01 + <_> + + 0 -1 337 -3.4221999347209930e-02 + + 7.5206297636032104e-01 -3.1531500816345215e-01 + <_> + + 0 -1 338 -3.9716001600027084e-02 + + -8.3139598369598389e-01 
1.7744399607181549e-01 + <_> + + 0 -1 339 2.5170000735670328e-03 + + -5.9377998113632202e-01 2.4657000601291656e-01 + <_> + + 0 -1 340 2.7428999543190002e-02 + + 1.5998399257659912e-01 -4.2781999707221985e-01 + <_> + + 0 -1 341 3.4986000508069992e-02 + + 3.5055998712778091e-02 -1.5988600254058838e+00 + <_> + + 0 -1 342 4.4970000162720680e-03 + + -5.2034300565719604e-01 3.7828299403190613e-01 + <_> + + 0 -1 343 2.7699999045580626e-03 + + -5.3182601928710938e-01 2.4951000511646271e-01 + <_> + + 0 -1 344 3.5174001008272171e-02 + + 1.9983400404453278e-01 -1.4446129798889160e+00 + <_> + + 0 -1 345 2.5970999151468277e-02 + + 4.4426999986171722e-02 -1.3622980117797852e+00 + <_> + + 0 -1 346 -1.5783999115228653e-02 + + -9.1020399332046509e-01 2.7190300822257996e-01 + <_> + + 0 -1 347 -7.5880000367760658e-03 + + 9.2064999043941498e-02 -8.1628900766372681e-01 + <_> + + 0 -1 348 2.0754000172019005e-02 + + 2.1185700595378876e-01 -7.4729001522064209e-01 + <_> + + 0 -1 349 5.9829000383615494e-02 + + -2.7301099896430969e-01 8.0923300981521606e-01 + <_> + + 0 -1 350 3.9039000868797302e-02 + + -1.0432299971580505e-01 8.6226201057434082e-01 + <_> + + 0 -1 351 2.1665999665856361e-02 + + 6.2709003686904907e-02 -9.8894298076629639e-01 + <_> + + 0 -1 352 -2.7496999129652977e-02 + + -9.2690998315811157e-01 1.5586300194263458e-01 + <_> + + 0 -1 353 1.0462000034749508e-02 + + 1.3418099284172058e-01 -7.0386397838592529e-01 + <_> + + 0 -1 354 2.4870999157428741e-02 + + 1.9706700742244720e-01 -4.0263301134109497e-01 + <_> + + 0 -1 355 -1.6036000102758408e-02 + + -1.1409829854965210e+00 7.3997996747493744e-02 + <_> + + 0 -1 356 4.8627000302076340e-02 + + 1.6990399360656738e-01 -7.2152197360992432e-01 + <_> + + 0 -1 357 1.2619999470189214e-03 + + -4.7389799356460571e-01 2.6254999637603760e-01 + <_> + + 0 -1 358 -8.8035002350807190e-02 + + -2.1606519222259521e+00 1.4554800093173981e-01 + <_> + + 0 -1 359 1.8356999382376671e-02 + + 4.4750999659299850e-02 -1.0766370296478271e+00 + <_> + + 0 -1 360 3.5275001078844070e-02 + + -3.2919000834226608e-02 1.2153890132904053e+00 + <_> + + 0 -1 361 -2.0392900705337524e-01 + + -1.3187999725341797e+00 1.5503999777138233e-02 + <_> + + 0 -1 362 -1.6619000583887100e-02 + + 3.6850199103355408e-01 -1.5283699333667755e-01 + <_> + + 0 -1 363 3.7739001214504242e-02 + + -2.5727799534797668e-01 7.0655298233032227e-01 + <_> + + 0 -1 364 2.2720000706613064e-03 + + -7.7602997422218323e-02 3.3367800712585449e-01 + <_> + + 0 -1 365 -1.4802999794483185e-02 + + -7.8524798154830933e-01 7.6934002339839935e-02 + <_> + + 0 -1 366 -4.8319000750780106e-02 + + 1.7022320032119751e+00 4.9722000956535339e-02 + <_> + + 0 -1 367 -2.9539000242948532e-02 + + 7.7670699357986450e-01 -2.4534299969673157e-01 + <_> + + 0 -1 368 -4.6169001609086990e-02 + + -1.4922779798507690e+00 1.2340000271797180e-01 + <_> + + 0 -1 369 -2.8064999729394913e-02 + + -2.1345369815826416e+00 -2.5797000154852867e-02 + <_> + + 0 -1 370 -5.7339998893439770e-03 + + 5.6982600688934326e-01 -1.2056600302457809e-01 + <_> + + 0 -1 371 -1.0111000388860703e-02 + + 6.7911398410797119e-01 -2.6638001203536987e-01 + <_> + + 0 -1 372 1.1359999887645245e-02 + + 2.4789799749851227e-01 -6.4493000507354736e-01 + <_> + + 0 -1 373 5.1809001713991165e-02 + + 1.4716000296175480e-02 -1.2395579814910889e+00 + <_> + + 0 -1 374 3.3291999250650406e-02 + + -8.2559995353221893e-03 1.0168470144271851e+00 + <_> + + 0 -1 375 -1.4494000002741814e-02 + + 4.5066800713539124e-01 -3.6250999569892883e-01 + <_> + + 0 -1 376 -3.4221999347209930e-02 + + -9.5292502641677856e-01 
2.0684599876403809e-01 + <_> + + 0 -1 377 -8.0654002726078033e-02 + + -2.0139501094818115e+00 -2.3084999993443489e-02 + <_> + + 0 -1 378 -8.9399999706074595e-04 + + 3.9572000503540039e-01 -2.9351300001144409e-01 + <_> + + 0 -1 379 9.7162000834941864e-02 + + -2.4980300664901733e-01 1.0859220027923584e+00 + <_> + + 0 -1 380 3.6614000797271729e-02 + + -5.7844001799821854e-02 1.2162159681320190e+00 + <_> + + 0 -1 381 5.1693998277187347e-02 + + 4.3062999844551086e-02 -1.0636160373687744e+00 + <_> + + 0 -1 382 -2.4557000026106834e-02 + + -4.8946800827980042e-01 1.7182900011539459e-01 + <_> + + 0 -1 383 3.2736799120903015e-01 + + -2.9688599705696106e-01 5.1798301935195923e-01 + <_> + + 0 -1 384 7.6959999278187752e-03 + + -5.9805899858474731e-01 2.4803200364112854e-01 + <_> + + 0 -1 385 1.6172200441360474e-01 + + -2.9613999649882317e-02 -2.3162529468536377e+00 + <_> + + 0 -1 386 -4.7889999113976955e-03 + + 3.7457901239395142e-01 -3.2779198884963989e-01 + <_> + + 0 -1 387 -1.8402999266982079e-02 + + -9.9692702293395996e-01 7.2948001325130463e-02 + <_> + + 0 -1 388 7.7665001153945923e-02 + + 1.4175699651241302e-01 -1.7238730192184448e+00 + <_> + + 0 -1 389 1.8921000882983208e-02 + + -2.1273100376129150e-01 1.0165189504623413e+00 + <_> + + 0 -1 390 -7.9397998750209808e-02 + + -1.3164349794387817e+00 1.4981999993324280e-01 + <_> + + 0 -1 391 -6.8037003278732300e-02 + + 4.9421998858451843e-01 -2.9091000556945801e-01 + <_> + + 0 -1 392 -6.1010001227259636e-03 + + 4.2430499196052551e-01 -3.3899301290512085e-01 + <_> + + 0 -1 393 3.1927000731229782e-02 + + -3.1046999618411064e-02 -2.3459999561309814e+00 + <_> + + 0 -1 394 -2.9843999072909355e-02 + + -7.8989601135253906e-01 1.5417699515819550e-01 + <_> + + 0 -1 395 -8.0541998147964478e-02 + + -2.2509229183197021e+00 -3.0906999483704567e-02 + <_> + + 0 -1 396 3.8109999150037766e-03 + + -2.5577300786972046e-01 2.3785500228404999e-01 + <_> + + 0 -1 397 3.3647000789642334e-02 + + -2.2541399300098419e-01 9.2307400703430176e-01 + <_> + + 0 -1 398 8.2809999585151672e-03 + + -2.8896200656890869e-01 3.1046199798583984e-01 + <_> + + 0 -1 399 1.0104399919509888e-01 + + -3.4864000976085663e-02 -2.7102620601654053e+00 + <_> + + 0 -1 400 -1.0009000077843666e-02 + + 5.9715402126312256e-01 -3.3831000328063965e-02 + <_> + + 0 -1 401 7.1919998154044151e-03 + + -4.7738000750541687e-01 2.2686000168323517e-01 + <_> + + 0 -1 402 2.4969000369310379e-02 + + 2.2877700626850128e-01 -1.0435529947280884e+00 + <_> + + 0 -1 403 2.7908000349998474e-01 + + -2.5818100571632385e-01 7.6780498027801514e-01 + <_> + + 0 -1 404 -4.4213000684976578e-02 + + -5.9798002243041992e-01 2.8039899468421936e-01 + <_> + + 0 -1 405 -1.4136999845504761e-02 + + 7.0987302064895630e-01 -2.5645199418067932e-01 + <_> + 91 + -3.6478610038757324e+00 + + <_> + + 0 -1 406 1.3771200180053711e-01 + + -5.5870598554611206e-01 1.0953769683837891e+00 + <_> + + 0 -1 407 3.4460999071598053e-02 + + -7.1171897649765015e-01 5.2899599075317383e-01 + <_> + + 0 -1 408 1.8580000847578049e-02 + + -1.1157519817352295e+00 4.0593999624252319e-01 + <_> + + 0 -1 409 2.5041999295353889e-02 + + -4.0892499685287476e-01 7.4129998683929443e-01 + <_> + + 0 -1 410 5.7179000228643417e-02 + + -3.8054299354553223e-01 7.3647701740264893e-01 + <_> + + 0 -1 411 1.4932000078260899e-02 + + -6.9945502281188965e-01 3.7950998544692993e-01 + <_> + + 0 -1 412 8.8900001719594002e-03 + + -5.4558598995208740e-01 3.6332499980926514e-01 + <_> + + 0 -1 413 3.0435999855399132e-02 + + -1.0124599933624268e-01 7.9585897922515869e-01 + <_> + + 0 -1 414 
-4.4160000979900360e-02 + + 8.4410899877548218e-01 -3.2976400852203369e-01 + <_> + + 0 -1 415 1.8461000174283981e-02 + + 2.6326599717140198e-01 -9.6736502647399902e-01 + <_> + + 0 -1 416 1.0614999569952488e-02 + + 1.5251900255680084e-01 -1.0589870214462280e+00 + <_> + + 0 -1 417 -4.5974001288414001e-02 + + -1.9918340444564819e+00 1.3629099726676941e-01 + <_> + + 0 -1 418 8.2900002598762512e-02 + + -3.2037198543548584e-01 6.0304200649261475e-01 + <_> + + 0 -1 419 -8.9130001142621040e-03 + + 5.9586602449417114e-01 -2.1139599382877350e-01 + <_> + + 0 -1 420 4.2814001441001892e-02 + + 2.2925000637769699e-02 -1.4679330587387085e+00 + <_> + + 0 -1 421 -8.7139997631311417e-03 + + -4.3989500403404236e-01 2.0439699292182922e-01 + <_> + + 0 -1 422 -4.3390002101659775e-03 + + -8.9066797494888306e-01 1.0469999909400940e-01 + <_> + + 0 -1 423 8.0749997869133949e-03 + + 2.1164199709892273e-01 -4.0231600403785706e-01 + <_> + + 0 -1 424 9.6739001572132111e-02 + + 1.3319999910891056e-02 -1.6085360050201416e+00 + <_> + + 0 -1 425 -3.0536999925971031e-02 + + 1.0063740015029907e+00 -1.3413299620151520e-01 + <_> + + 0 -1 426 -6.0855999588966370e-02 + + -1.4689979553222656e+00 9.4240000471472740e-03 + <_> + + 0 -1 427 -3.8162000477313995e-02 + + -8.1636399030685425e-01 2.6171201467514038e-01 + <_> + + 0 -1 428 -9.6960002556443214e-03 + + 1.1561699956655502e-01 -7.1693199872970581e-01 + <_> + + 0 -1 429 4.8902999609708786e-02 + + 1.3050499558448792e-01 -1.6448370218276978e+00 + <_> + + 0 -1 430 -4.1611999273300171e-02 + + -1.1795840263366699e+00 2.5017000734806061e-02 + <_> + + 0 -1 431 -2.0188000053167343e-02 + + 6.3188201189041138e-01 -1.0490400344133377e-01 + <_> + + 0 -1 432 -9.7900000400841236e-04 + + 1.8507799506187439e-01 -5.3565901517868042e-01 + <_> + + 0 -1 433 -3.3622000366449356e-02 + + -9.3127602338790894e-01 2.0071500539779663e-01 + <_> + + 0 -1 434 1.9455999135971069e-02 + + 3.8029000163078308e-02 -1.0112210512161255e+00 + <_> + + 0 -1 435 -3.1800000579096377e-04 + + 3.6457699537277222e-01 -2.7610900998115540e-01 + <_> + + 0 -1 436 -3.8899999344721437e-04 + + 1.9665899872779846e-01 -5.3410500288009644e-01 + <_> + + 0 -1 437 -9.3496002256870270e-02 + + -1.6772350072860718e+00 2.0727099478244781e-01 + <_> + + 0 -1 438 -7.7877998352050781e-02 + + -3.0760629177093506e+00 -3.5803999751806259e-02 + <_> + + 0 -1 439 1.6947999596595764e-02 + + 2.1447399258613586e-01 -7.1376299858093262e-01 + <_> + + 0 -1 440 -2.1459000185132027e-02 + + -1.1468060016632080e+00 1.5855999663472176e-02 + <_> + + 0 -1 441 -1.2865999713540077e-02 + + 8.3812397718429565e-01 -6.5944001078605652e-02 + <_> + + 0 -1 442 7.8220004215836525e-03 + + -2.8026801347732544e-01 7.9376900196075439e-01 + <_> + + 0 -1 443 1.0294400155544281e-01 + + 1.7832300066947937e-01 -6.8412202596664429e-01 + <_> + + 0 -1 444 -3.7487998604774475e-02 + + 9.6189999580383301e-01 -2.1735599637031555e-01 + <_> + + 0 -1 445 2.5505999103188515e-02 + + 1.0103999637067318e-02 1.2461110353469849e+00 + <_> + + 0 -1 446 6.6700001480057836e-04 + + -5.3488200902938843e-01 1.4746299386024475e-01 + <_> + + 0 -1 447 -2.8867900371551514e-01 + + 8.2172799110412598e-01 -1.4948000200092793e-02 + <_> + + 0 -1 448 9.1294996440410614e-02 + + -1.9605399668216705e-01 1.0803170204162598e+00 + <_> + + 0 -1 449 1.2056600302457809e-01 + + -2.3848999291658401e-02 1.1392610073089600e+00 + <_> + + 0 -1 450 -7.3775000870227814e-02 + + -1.3583840131759644e+00 -4.2039998807013035e-03 + <_> + + 0 -1 451 -3.3128000795841217e-02 + + -6.4483201503753662e-01 2.4142199754714966e-01 + <_> + + 0 -1 
452 -4.3937001377344131e-02 + + 8.4285402297973633e-01 -2.0624800026416779e-01 + <_> + + 0 -1 453 1.8110199272632599e-01 + + 1.9212099909782410e-01 -1.2222139835357666e+00 + <_> + + 0 -1 454 -1.1850999668240547e-02 + + -7.2677397727966309e-01 5.2687998861074448e-02 + <_> + + 0 -1 455 4.5920000411570072e-03 + + -3.6305201053619385e-01 2.9223799705505371e-01 + <_> + + 0 -1 456 7.0620002225041389e-03 + + 5.8116000145673752e-02 -6.7161601781845093e-01 + <_> + + 0 -1 457 -2.3715000599622726e-02 + + 4.7142100334167480e-01 1.8580000847578049e-02 + <_> + + 0 -1 458 -6.7171998322010040e-02 + + -1.1331889629364014e+00 2.3780999705195427e-02 + <_> + + 0 -1 459 -6.5310001373291016e-02 + + 9.8253500461578369e-01 2.8362000361084938e-02 + <_> + + 0 -1 460 2.2791000083088875e-02 + + -2.8213700652122498e-01 5.8993399143218994e-01 + <_> + + 0 -1 461 -1.9037999212741852e-02 + + -6.3711500167846680e-01 2.6514598727226257e-01 + <_> + + 0 -1 462 -6.8689999170601368e-03 + + 3.7487301230430603e-01 -3.3232098817825317e-01 + <_> + + 0 -1 463 -4.0146000683307648e-02 + + -1.3048729896545410e+00 1.5724299848079681e-01 + <_> + + 0 -1 464 -4.0530998259782791e-02 + + -2.0458049774169922e+00 -2.6925999671220779e-02 + <_> + + 0 -1 465 -1.2253999710083008e-02 + + 7.7649402618408203e-01 -4.2971000075340271e-02 + <_> + + 0 -1 466 -2.7219999581575394e-02 + + 1.7424400150775909e-01 -4.4600901007652283e-01 + <_> + + 0 -1 467 -8.8366001844406128e-02 + + -1.5036419630050659e+00 1.4289900660514832e-01 + <_> + + 0 -1 468 -7.9159997403621674e-03 + + 2.8666698932647705e-01 -3.7923699617385864e-01 + <_> + + 0 -1 469 -4.1960000991821289e-02 + + 1.3846950531005859e+00 6.5026998519897461e-02 + <_> + + 0 -1 470 4.5662999153137207e-02 + + -2.2452299296855927e-01 7.9521000385284424e-01 + <_> + + 0 -1 471 -1.4090600609779358e-01 + + -1.5879319906234741e+00 1.1359000205993652e-01 + <_> + + 0 -1 472 -5.9216000139713287e-02 + + -1.1945960521697998e+00 -7.1640000678598881e-03 + <_> + + 0 -1 473 4.3390002101659775e-03 + + -1.5528699755668640e-01 4.0664499998092651e-01 + <_> + + 0 -1 474 -2.0369999110698700e-03 + + 2.5927901268005371e-01 -3.8368299603462219e-01 + <_> + + 0 -1 475 2.7516499161720276e-01 + + -8.8497996330261230e-02 7.6787501573562622e-01 + <_> + + 0 -1 476 -2.6601999998092651e-02 + + 7.5024497509002686e-01 -2.2621999680995941e-01 + <_> + + 0 -1 477 4.0906000882387161e-02 + + 1.2158600240945816e-01 -1.4566910266876221e+00 + <_> + + 0 -1 478 5.5320002138614655e-03 + + -3.6611500382423401e-01 2.5968599319458008e-01 + <_> + + 0 -1 479 3.1879000365734100e-02 + + -7.5019001960754395e-02 4.8484799265861511e-01 + <_> + + 0 -1 480 -4.1482001543045044e-02 + + 7.8220397233963013e-01 -2.1992200613021851e-01 + <_> + + 0 -1 481 -9.6130996942520142e-02 + + -8.9456301927566528e-01 1.4680700004100800e-01 + <_> + + 0 -1 482 -1.1568999849259853e-02 + + 8.2714098691940308e-01 -2.0275600254535675e-01 + <_> + + 0 -1 483 1.8312999978661537e-02 + + 1.6367999836802483e-02 2.7306801080703735e-01 + <_> + + 0 -1 484 -3.4166000783443451e-02 + + 1.1307320594787598e+00 -1.8810899555683136e-01 + <_> + + 0 -1 485 -2.4476999416947365e-02 + + -5.7791298627853394e-01 1.5812499821186066e-01 + <_> + + 0 -1 486 4.8957001417875290e-02 + + -2.2564999759197235e-02 -1.6373280286788940e+00 + <_> + + 0 -1 487 -2.0702999085187912e-02 + + -5.4512101411819458e-01 2.4086999893188477e-01 + <_> + + 0 -1 488 -2.3002000525593758e-02 + + -1.2236540317535400e+00 -7.3440000414848328e-03 + <_> + + 0 -1 489 6.4585000276565552e-02 + + 1.4695599675178528e-01 -4.4967499375343323e-01 + <_> + + 
0 -1 490 1.2666000053286552e-02 + + -2.7873900532722473e-01 4.3876600265502930e-01 + <_> + + 0 -1 491 -1.2002999894320965e-02 + + -2.4289099872112274e-01 2.5350099802017212e-01 + <_> + + 0 -1 492 -2.6443999260663986e-02 + + -8.5864800214767456e-01 2.6025999337434769e-02 + <_> + + 0 -1 493 -2.5547999888658524e-02 + + 6.9287902116775513e-01 -2.1160000469535589e-03 + <_> + + 0 -1 494 3.9115000516176224e-02 + + -1.6589100658893585e-01 1.5209139585494995e+00 + <_> + + 0 -1 495 -6.0330000706017017e-03 + + 4.3856900930404663e-01 -2.1613700687885284e-01 + <_> + + 0 -1 496 -3.3936999738216400e-02 + + -9.7998398542404175e-01 2.2133000195026398e-02 + <_> + 99 + -3.8700489997863770e+00 + + <_> + + 0 -1 497 4.0672998875379562e-02 + + -9.0474700927734375e-01 6.4410597085952759e-01 + <_> + + 0 -1 498 2.5609999895095825e-02 + + -7.9216998815536499e-01 5.7489997148513794e-01 + <_> + + 0 -1 499 1.9959500432014465e-01 + + -3.0099600553512573e-01 1.3143850564956665e+00 + <_> + + 0 -1 500 1.2404999695718288e-02 + + -8.9882999658584595e-01 2.9205799102783203e-01 + <_> + + 0 -1 501 3.9207998663187027e-02 + + -4.1955199837684631e-01 5.3463298082351685e-01 + <_> + + 0 -1 502 -3.0843999236822128e-02 + + 4.5793399214744568e-01 -4.4629099965095520e-01 + <_> + + 0 -1 503 -3.5523001104593277e-02 + + 9.1310501098632812e-01 -2.7373200654983521e-01 + <_> + + 0 -1 504 -6.1650000512599945e-02 + + -1.4697799682617188e+00 2.0364099740982056e-01 + <_> + + 0 -1 505 -1.1739999987185001e-02 + + -1.0482879877090454e+00 6.7801997065544128e-02 + <_> + + 0 -1 506 6.6933996975421906e-02 + + 2.9274499416351318e-01 -5.2282899618148804e-01 + <_> + + 0 -1 507 -2.0631000399589539e-02 + + -1.2855139970779419e+00 4.4550999999046326e-02 + <_> + + 0 -1 508 -2.2357000038027763e-02 + + -8.5753798484802246e-01 1.8434000015258789e-01 + <_> + + 0 -1 509 1.1500000255182385e-03 + + 1.6405500471591949e-01 -6.9125002622604370e-01 + <_> + + 0 -1 510 3.5872999578714371e-02 + + 1.5756499767303467e-01 -8.4262597560882568e-01 + <_> + + 0 -1 511 3.0659999698400497e-02 + + 2.1637000143527985e-02 -1.3634690046310425e+00 + <_> + + 0 -1 512 5.5559999309480190e-03 + + -1.6737000644207001e-01 2.5888401269912720e-01 + <_> + + 0 -1 513 -6.1160000041127205e-03 + + -9.7271800041198730e-01 6.6100001335144043e-02 + <_> + + 0 -1 514 -3.0316999182105064e-02 + + 9.8474198579788208e-01 -1.6448000445961952e-02 + <_> + + 0 -1 515 -9.7200004383921623e-03 + + 4.7604700922966003e-01 -3.2516700029373169e-01 + <_> + + 0 -1 516 -5.7126998901367188e-02 + + -9.5920699834823608e-01 1.9938200712203979e-01 + <_> + + 0 -1 517 4.0059997700154781e-03 + + -5.2612501382827759e-01 2.2428700327873230e-01 + <_> + + 0 -1 518 3.3734001219272614e-02 + + 1.7070099711418152e-01 -1.0737580060958862e+00 + <_> + + 0 -1 519 -3.4641999751329422e-02 + + -1.1343129873275757e+00 3.6540001630783081e-02 + <_> + + 0 -1 520 4.6923000365495682e-02 + + 2.5832301378250122e-01 -7.1535801887512207e-01 + <_> + + 0 -1 521 -8.7660001590847969e-03 + + 1.9640900194644928e-01 -5.3355097770690918e-01 + <_> + + 0 -1 522 6.5627999603748322e-02 + + -5.1194999366998672e-02 9.7610700130462646e-01 + <_> + + 0 -1 523 -4.4165000319480896e-02 + + 1.0631920099258423e+00 -2.3462599515914917e-01 + <_> + + 0 -1 524 1.7304999753832817e-02 + + -1.8582899868488312e-01 4.5889899134635925e-01 + <_> + + 0 -1 525 3.3135998994112015e-02 + + -2.9381999745965004e-02 -2.6651329994201660e+00 + <_> + + 0 -1 526 -2.1029999479651451e-02 + + 9.9979901313781738e-01 2.4937000125646591e-02 + <_> + + 0 -1 527 2.9783999547362328e-02 + + 
-2.9605999588966370e-02 -2.1695868968963623e+00 + <_> + + 0 -1 528 5.5291999131441116e-02 + + -7.5599999399855733e-04 7.4651998281478882e-01 + <_> + + 0 -1 529 -3.3597998321056366e-02 + + -1.5274159908294678e+00 1.1060000397264957e-02 + <_> + + 0 -1 530 1.9602999091148376e-02 + + 3.3574998378753662e-02 9.9526202678680420e-01 + <_> + + 0 -1 531 -2.0787000656127930e-02 + + 7.6612901687622070e-01 -2.4670800566673279e-01 + <_> + + 0 -1 532 3.2536000013351440e-02 + + 1.6263400018215179e-01 -6.1134302616119385e-01 + <_> + + 0 -1 533 -1.0788000188767910e-02 + + -9.7839701175689697e-01 2.8969999402761459e-02 + <_> + + 0 -1 534 -9.9560003727674484e-03 + + 4.6145799756050110e-01 -1.3510499894618988e-01 + <_> + + 0 -1 535 -3.7489999085664749e-03 + + 2.5458198785781860e-01 -5.1955598592758179e-01 + <_> + + 0 -1 536 -4.1779998689889908e-02 + + -8.0565100908279419e-01 1.5208500623703003e-01 + <_> + + 0 -1 537 -3.4221000969409943e-02 + + -1.3137799501419067e+00 -3.5800000187009573e-03 + <_> + + 0 -1 538 1.0130000300705433e-02 + + 2.0175799727439880e-01 -6.1339598894119263e-01 + <_> + + 0 -1 539 -8.9849002659320831e-02 + + 9.7632801532745361e-01 -2.0884799957275391e-01 + <_> + + 0 -1 540 2.6097999885678291e-02 + + -1.8807999789714813e-01 4.7705799341201782e-01 + <_> + + 0 -1 541 -3.7539999466389418e-03 + + -6.7980402708053589e-01 1.1288800090551376e-01 + <_> + + 0 -1 542 3.1973000615835190e-02 + + 1.8951700627803802e-01 -1.4967479705810547e+00 + <_> + + 0 -1 543 1.9332999363541603e-02 + + -2.3609900474548340e-01 8.1320500373840332e-01 + <_> + + 0 -1 544 1.9490000559017062e-03 + + 2.4830399453639984e-01 -6.9211997091770172e-02 + <_> + + 0 -1 545 -4.4146999716758728e-02 + + -1.0418920516967773e+00 4.8053000122308731e-02 + <_> + + 0 -1 546 -4.4681999832391739e-02 + + 5.1346302032470703e-01 -7.3799998499453068e-03 + <_> + + 0 -1 547 -1.0757499933242798e-01 + + 1.6202019453048706e+00 -1.8667599558830261e-01 + <_> + + 0 -1 548 -1.2846800684928894e-01 + + 2.9869480133056641e+00 9.5427997410297394e-02 + <_> + + 0 -1 549 -4.4757999479770660e-02 + + 6.0405302047729492e-01 -2.7058699727058411e-01 + <_> + + 0 -1 550 -4.3990999460220337e-02 + + -6.1790502071380615e-01 1.5997199714183807e-01 + <_> + + 0 -1 551 -1.2268999963998795e-01 + + 6.6327202320098877e-01 -2.3636999726295471e-01 + <_> + + 0 -1 552 -1.9982999190688133e-02 + + -1.1228660345077515e+00 1.9616700708866119e-01 + <_> + + 0 -1 553 -1.5527999959886074e-02 + + -1.0770269632339478e+00 2.0693000406026840e-02 + <_> + + 0 -1 554 -4.8971001058816910e-02 + + 8.1168299913406372e-01 -1.7252000048756599e-02 + <_> + + 0 -1 555 5.5975999683141708e-02 + + -2.2529000416398048e-02 -1.7356760501861572e+00 + <_> + + 0 -1 556 -9.8580000922083855e-03 + + 6.7881399393081665e-01 -5.8180000633001328e-02 + <_> + + 0 -1 557 1.3481000438332558e-02 + + 5.7847999036312103e-02 -7.7255302667617798e-01 + <_> + + 0 -1 558 6.5609999001026154e-03 + + -1.3146899640560150e-01 6.7055797576904297e-01 + <_> + + 0 -1 559 7.1149999275803566e-03 + + -3.7880599498748779e-01 3.0978998541831970e-01 + <_> + + 0 -1 560 4.8159998841583729e-03 + + -5.8470398187637329e-01 2.5602099299430847e-01 + <_> + + 0 -1 561 9.5319999381899834e-03 + + -3.0217000842094421e-01 4.1253298521041870e-01 + <_> + + 0 -1 562 -2.7474999427795410e-02 + + 5.9154701232910156e-01 1.7963999882340431e-02 + <_> + + 0 -1 563 -3.9519999176263809e-02 + + 9.6913498640060425e-01 -2.1020300686359406e-01 + <_> + + 0 -1 564 -3.0658999457955360e-02 + + 9.1155898571014404e-01 4.0550000965595245e-02 + <_> + + 0 -1 565 -1.4680000022053719e-03 + 
+ -6.0489797592163086e-01 1.6960899531841278e-01 + <_> + + 0 -1 566 1.9077600538730621e-01 + + 4.3515000492334366e-02 8.1892901659011841e-01 + <_> + + 0 -1 567 5.1790000870823860e-03 + + -9.3617302179336548e-01 2.4937000125646591e-02 + <_> + + 0 -1 568 2.4126000702381134e-02 + + 1.8175500631332397e-01 -3.4185901284217834e-01 + <_> + + 0 -1 569 -2.6383999735116959e-02 + + -1.2912579774856567e+00 -3.4280000254511833e-03 + <_> + + 0 -1 570 5.4139997810125351e-03 + + -4.6291999518871307e-02 2.5269600749015808e-01 + <_> + + 0 -1 571 5.4216001182794571e-02 + + -1.2848000042140484e-02 -1.4304540157318115e+00 + <_> + + 0 -1 572 2.3799999326001853e-04 + + -2.6676699519157410e-01 3.3588299155235291e-01 + <_> + + 0 -1 573 1.5216999687254429e-02 + + -5.1367300748825073e-01 1.3005100190639496e-01 + <_> + + 0 -1 574 1.7007999122142792e-02 + + 4.1575899720191956e-01 -3.1241199374198914e-01 + <_> + + 0 -1 575 3.0496999621391296e-02 + + -2.4820999801158905e-01 7.0828497409820557e-01 + <_> + + 0 -1 576 6.5430002287030220e-03 + + -2.2637000679969788e-01 1.9184599816799164e-01 + <_> + + 0 -1 577 1.4163999259471893e-01 + + 6.5227001905441284e-02 -8.8809502124786377e-01 + <_> + + 0 -1 578 1.9338000565767288e-02 + + 1.8891200423240662e-01 -2.7397701144218445e-01 + <_> + + 0 -1 579 -1.7324000597000122e-02 + + -9.4866698980331421e-01 2.4196999147534370e-02 + <_> + + 0 -1 580 -6.2069999985396862e-03 + + 3.6938399076461792e-01 -1.7494900524616241e-01 + <_> + + 0 -1 581 -1.6109000891447067e-02 + + 9.6159499883651733e-01 -2.0005300641059875e-01 + <_> + + 0 -1 582 -1.0122500360012054e-01 + + -3.0699110031127930e+00 1.1363799870014191e-01 + <_> + + 0 -1 583 -7.5509999878704548e-03 + + 2.2921000421047211e-01 -4.5645099878311157e-01 + <_> + + 0 -1 584 4.4247999787330627e-02 + + -3.1599999056197703e-04 3.9225301146507263e-01 + <_> + + 0 -1 585 -1.1636000126600266e-01 + + 9.5233702659606934e-01 -2.0201599597930908e-01 + <_> + + 0 -1 586 4.7360002063214779e-03 + + -9.9177002906799316e-02 2.0370499789714813e-01 + <_> + + 0 -1 587 2.2459000349044800e-02 + + 8.7280003353953362e-03 -1.0217070579528809e+00 + <_> + + 0 -1 588 -1.2109000235795975e-02 + + 6.4812600612640381e-01 -9.0149000287055969e-02 + <_> + + 0 -1 589 5.6120000779628754e-02 + + -3.6759998649358749e-02 -1.9275590181350708e+00 + <_> + + 0 -1 590 -8.7379999458789825e-03 + + 6.9261300563812256e-01 -6.8374998867511749e-02 + <_> + + 0 -1 591 6.6399998031556606e-03 + + -4.0569800138473511e-01 1.8625700473785400e-01 + <_> + + 0 -1 592 -1.8131999298930168e-02 + + -6.4518201351165771e-01 2.1976399421691895e-01 + <_> + + 0 -1 593 -2.2718999534845352e-02 + + 9.7776198387145996e-01 -1.8654300272464752e-01 + <_> + + 0 -1 594 1.2705000117421150e-02 + + -1.0546600073575974e-01 3.7404099106788635e-01 + <_> + + 0 -1 595 -1.3682999648153782e-02 + + 6.1064100265502930e-01 -2.6881098747253418e-01 + <_> + 115 + -3.7160909175872803e+00 + + <_> + + 0 -1 596 3.1357999891042709e-02 + + -1.0183910131454468e+00 5.7528597116470337e-01 + <_> + + 0 -1 597 9.3050003051757812e-02 + + -4.1297501325607300e-01 1.0091199874877930e+00 + <_> + + 0 -1 598 2.5949999690055847e-02 + + -5.8587902784347534e-01 5.6606197357177734e-01 + <_> + + 0 -1 599 1.6472000628709793e-02 + + -9.2857497930526733e-01 3.0924499034881592e-01 + <_> + + 0 -1 600 -1.8779999809339643e-03 + + 1.1951000243425369e-01 -1.1180130243301392e+00 + <_> + + 0 -1 601 -9.0129999443888664e-03 + + -5.7849502563476562e-01 3.3154401183128357e-01 + <_> + + 0 -1 602 2.2547999396920204e-02 + + -3.8325101137161255e-01 5.2462202310562134e-01 + <_> + 
+ 0 -1 603 -3.7780001759529114e-02 + + 1.1790670156478882e+00 -3.4166999161243439e-02 + <_> + + 0 -1 604 -5.3799999877810478e-03 + + -8.6265897750854492e-01 1.1867900192737579e-01 + <_> + + 0 -1 605 -2.3893000558018684e-02 + + -7.4950599670410156e-01 2.1011400222778320e-01 + <_> + + 0 -1 606 -2.6521999388933182e-02 + + 9.2128598690032959e-01 -2.8252801299095154e-01 + <_> + + 0 -1 607 1.2280000373721123e-02 + + 2.6662799715995789e-01 -7.0013600587844849e-01 + <_> + + 0 -1 608 9.6594996750354767e-02 + + -2.8453999757766724e-01 7.3168998956680298e-01 + <_> + + 0 -1 609 -2.7414999902248383e-02 + + -6.1492699384689331e-01 1.5576200187206268e-01 + <_> + + 0 -1 610 -1.5767000615596771e-02 + + 5.7551199197769165e-01 -3.4362199902534485e-01 + <_> + + 0 -1 611 -2.1100000012665987e-03 + + 3.2599699497222900e-01 -1.3008299469947815e-01 + <_> + + 0 -1 612 1.2006999924778938e-02 + + 8.9322999119758606e-02 -9.6025598049163818e-01 + <_> + + 0 -1 613 -1.5421999618411064e-02 + + 3.4449499845504761e-01 -4.6711999177932739e-01 + <_> + + 0 -1 614 -4.1579999960958958e-03 + + 2.3696300387382507e-01 -5.2563297748565674e-01 + <_> + + 0 -1 615 -2.1185999736189842e-02 + + -7.4267697334289551e-01 2.1702000498771667e-01 + <_> + + 0 -1 616 -1.7077000811696053e-02 + + -9.0471798181533813e-01 6.6012002527713776e-02 + <_> + + 0 -1 617 -4.0849998593330383e-02 + + -3.4446600079536438e-01 2.1503700315952301e-01 + <_> + + 0 -1 618 -8.1930002197623253e-03 + + -9.3388599157333374e-01 5.0471000373363495e-02 + <_> + + 0 -1 619 -1.9238000735640526e-02 + + -5.3203701972961426e-01 1.7240600287914276e-01 + <_> + + 0 -1 620 -4.4192001223564148e-02 + + 9.2075002193450928e-01 -2.2148500382900238e-01 + <_> + + 0 -1 621 -6.2392000108957291e-02 + + -7.1053802967071533e-01 1.8323899805545807e-01 + <_> + + 0 -1 622 -1.0079999919980764e-03 + + -8.7063097953796387e-01 5.5330000817775726e-02 + <_> + + 0 -1 623 2.3870000615715981e-02 + + -2.2854200005531311e-01 5.2415597438812256e-01 + <_> + + 0 -1 624 2.1391000598669052e-02 + + -3.0325898528099060e-01 5.5860602855682373e-01 + <_> + + 0 -1 625 2.0254999399185181e-02 + + 2.6901501417160034e-01 -7.0261800289154053e-01 + <_> + + 0 -1 626 -2.8772000223398209e-02 + + -1.1835030317306519e+00 4.6512000262737274e-02 + <_> + + 0 -1 627 3.4199999645352364e-03 + + -5.4652100801467896e-01 2.5962498784065247e-01 + <_> + + 0 -1 628 5.6983001530170441e-02 + + -2.6982900500297546e-01 5.8170700073242188e-01 + <_> + + 0 -1 629 -9.3892000615596771e-02 + + -9.1046398878097534e-01 1.9677700102329254e-01 + <_> + + 0 -1 630 1.7699999734759331e-02 + + -4.4003298878669739e-01 2.1349500119686127e-01 + <_> + + 0 -1 631 2.2844199836254120e-01 + + 2.3605000227689743e-02 7.7171599864959717e-01 + <_> + + 0 -1 632 -1.8287500739097595e-01 + + 7.9228597879409790e-01 -2.4644799530506134e-01 + <_> + + 0 -1 633 -6.9891996681690216e-02 + + 8.0267798900604248e-01 -3.6072000861167908e-02 + <_> + + 0 -1 634 1.5297000296413898e-02 + + -2.0072300732135773e-01 1.1030600070953369e+00 + <_> + + 0 -1 635 6.7500001750886440e-03 + + -4.5967999845743179e-02 7.2094500064849854e-01 + <_> + + 0 -1 636 -1.5983000397682190e-02 + + -9.0357202291488647e-01 4.4987998902797699e-02 + <_> + + 0 -1 637 1.3088000006973743e-02 + + 3.5297098755836487e-01 -3.7710601091384888e-01 + <_> + + 0 -1 638 1.3061000034213066e-02 + + -1.9583599269390106e-01 1.1198940277099609e+00 + <_> + + 0 -1 639 -3.9907000958919525e-02 + + -1.3998429775238037e+00 1.9145099818706512e-01 + <_> + + 0 -1 640 1.5026999637484550e-02 + + 2.3600000422447920e-03 -1.1611249446868896e+00 + <_> 
[... OpenCV cascade-classifier training data continues (added file, flattened here): boosted decision-stump entries numbered 641 through 1381, each giving a feature index, a node threshold, and two leaf values, separated by <_> markers. The span covers the tail of one stage plus five further stage headers, whose weak-classifier counts and stage thresholds are 127 / -3.5645289421081543e+00, 135 / -3.7025990486145020e+00, 136 / -3.4265899658203125e+00, 137 / -3.5125269889831543e+00, and 159 / -3.5939640998840332e+00; the data is cut off mid-entry at classifier 1381 and continues beyond this excerpt ...]
-2.2039200365543365e-01 + <_> + + 0 -1 1382 -1.2138999998569489e-02 + + -6.9731897115707397e-01 1.5712000429630280e-02 + <_> + + 0 -1 1383 -2.4237999692559242e-02 + + 3.4593299031257629e-01 7.1469999849796295e-02 + <_> + + 0 -1 1384 -2.5272000581026077e-02 + + -8.7583297491073608e-01 -9.8240002989768982e-03 + <_> + + 0 -1 1385 1.2597000226378441e-02 + + 2.3649999499320984e-01 -2.8731200098991394e-01 + <_> + + 0 -1 1386 5.7330999523401260e-02 + + -6.1530999839305878e-02 -2.2326040267944336e+00 + <_> + + 0 -1 1387 1.6671000048518181e-02 + + -1.9850100576877594e-01 4.0810701251029968e-01 + <_> + + 0 -1 1388 -2.2818999364972115e-02 + + 9.6487599611282349e-01 -2.0245699584484100e-01 + <_> + + 0 -1 1389 3.7000001611886546e-05 + + -5.8908998966217041e-02 2.7055400609970093e-01 + <_> + + 0 -1 1390 -7.6700001955032349e-03 + + -4.5317101478576660e-01 8.9628003537654877e-02 + <_> + + 0 -1 1391 9.4085998833179474e-02 + + 1.1604599654674530e-01 -1.0951169729232788e+00 + <_> + + 0 -1 1392 -6.2267001718282700e-02 + + 1.8096530437469482e+00 -1.4773200452327728e-01 + <_> + + 0 -1 1393 1.7416000366210938e-02 + + 2.3068200051784515e-01 -4.2417600750923157e-01 + <_> + + 0 -1 1394 -2.2066000849008560e-02 + + 4.9270299077033997e-01 -2.0630900561809540e-01 + <_> + + 0 -1 1395 -1.0404000058770180e-02 + + 6.0924297571182251e-01 2.8130000457167625e-02 + <_> + + 0 -1 1396 -9.3670003116130829e-03 + + 4.0171200037002563e-01 -2.1681700646877289e-01 + <_> + + 0 -1 1397 -2.9039999470114708e-02 + + -8.4876501560211182e-01 1.4246800541877747e-01 + <_> + + 0 -1 1398 -2.1061999723315239e-02 + + -7.9198300838470459e-01 -1.2595999985933304e-02 + <_> + + 0 -1 1399 -3.7000998854637146e-02 + + -6.7488902807235718e-01 1.2830400466918945e-01 + <_> + + 0 -1 1400 1.0735999792814255e-02 + + 3.6779999732971191e-02 -6.3393002748489380e-01 + <_> + + 0 -1 1401 1.6367599368095398e-01 + + 1.3803899288177490e-01 -4.7189000248908997e-01 + <_> + + 0 -1 1402 9.4917997717857361e-02 + + -1.3855700194835663e-01 1.9492419958114624e+00 + <_> + + 0 -1 1403 3.5261999815702438e-02 + + 1.3721899688243866e-01 -2.1186530590057373e+00 + <_> + + 0 -1 1404 1.2811000458896160e-02 + + -2.0008100569248199e-01 4.9507799744606018e-01 + <_> + 155 + -3.3933560848236084e+00 + + <_> + + 0 -1 1405 1.3904400169849396e-01 + + -4.6581199765205383e-01 7.6431602239608765e-01 + <_> + + 0 -1 1406 1.1916999705135822e-02 + + -9.4398999214172363e-01 3.9726299047470093e-01 + <_> + + 0 -1 1407 -1.0006999596953392e-02 + + 3.2718798518180847e-01 -6.3367402553558350e-01 + <_> + + 0 -1 1408 -6.0479999519884586e-03 + + 2.7427899837493896e-01 -5.7446998357772827e-01 + <_> + + 0 -1 1409 -1.2489999644458294e-03 + + 2.3629300296306610e-01 -6.8593502044677734e-01 + <_> + + 0 -1 1410 3.2382000237703323e-02 + + -5.7630199193954468e-01 2.7492699027061462e-01 + <_> + + 0 -1 1411 -1.3957999646663666e-02 + + -6.1061501502990723e-01 2.4541600048542023e-01 + <_> + + 0 -1 1412 1.1159999994561076e-03 + + -5.6539100408554077e-01 2.7179300785064697e-01 + <_> + + 0 -1 1413 2.7000000045518391e-05 + + -8.0235999822616577e-01 1.1509100347757339e-01 + <_> + + 0 -1 1414 -2.5700000696815550e-04 + + -8.1205898523330688e-01 2.3844699561595917e-01 + <_> + + 0 -1 1415 4.0460000745952129e-03 + + 1.3909600675106049e-01 -6.6163200139999390e-01 + <_> + + 0 -1 1416 1.4356000348925591e-02 + + -1.6485199332237244e-01 4.1901698708534241e-01 + <_> + + 0 -1 1417 -5.5374998599290848e-02 + + 1.4425870180130005e+00 -1.8820199370384216e-01 + <_> + + 0 -1 1418 9.3594998121261597e-02 + + 1.3548299670219421e-01 
-9.1636097431182861e-01 + <_> + + 0 -1 1419 2.6624999940395355e-02 + + -3.3748298883438110e-01 3.9233601093292236e-01 + <_> + + 0 -1 1420 3.7469998933374882e-03 + + -1.1615400016307831e-01 4.4399300217628479e-01 + <_> + + 0 -1 1421 -3.1886000186204910e-02 + + -9.9498301744461060e-01 1.6120000509545207e-03 + <_> + + 0 -1 1422 -2.2600000724196434e-02 + + -4.8067399859428406e-01 1.7007300257682800e-01 + <_> + + 0 -1 1423 2.5202000513672829e-02 + + 3.5580001771450043e-02 -8.0215400457382202e-01 + <_> + + 0 -1 1424 -3.1036999076604843e-02 + + -1.0895340442657471e+00 1.8081900477409363e-01 + <_> + + 0 -1 1425 -2.6475999504327774e-02 + + 9.5671200752258301e-01 -2.1049399673938751e-01 + <_> + + 0 -1 1426 -1.3853999786078930e-02 + + -1.0370320081710815e+00 2.2166700661182404e-01 + <_> + + 0 -1 1427 -6.2925003468990326e-02 + + 9.0199398994445801e-01 -1.9085299968719482e-01 + <_> + + 0 -1 1428 -4.4750999659299850e-02 + + -1.0119110345840454e+00 1.4691199362277985e-01 + <_> + + 0 -1 1429 -2.0428000018000603e-02 + + 6.1624497175216675e-01 -2.3552699387073517e-01 + <_> + + 0 -1 1430 -8.0329999327659607e-03 + + -8.3279997110366821e-02 2.1728700399398804e-01 + <_> + + 0 -1 1431 8.7280003353953362e-03 + + 6.5458998084068298e-02 -6.0318702459335327e-01 + <_> + + 0 -1 1432 -2.7202000841498375e-02 + + -9.3447399139404297e-01 1.5270000696182251e-01 + <_> + + 0 -1 1433 -1.6471000388264656e-02 + + -8.4177100658416748e-01 1.3332000002264977e-02 + <_> + + 0 -1 1434 -1.3744000345468521e-02 + + 6.0567200183868408e-01 -9.2021003365516663e-02 + <_> + + 0 -1 1435 2.9164999723434448e-02 + + -2.8114000335335732e-02 -1.4014569520950317e+00 + <_> + + 0 -1 1436 3.7457000464200974e-02 + + 1.3080599904060364e-01 -4.9382498860359192e-01 + <_> + + 0 -1 1437 -2.5070000439882278e-02 + + -1.1289390325546265e+00 -1.4600000344216824e-02 + <_> + + 0 -1 1438 -6.3812002539634705e-02 + + 7.5871598720550537e-01 -1.8200000049546361e-03 + <_> + + 0 -1 1439 -9.3900002539157867e-03 + + 2.9936400055885315e-01 -2.9487800598144531e-01 + <_> + + 0 -1 1440 -7.6000002445653081e-04 + + 1.9725000485777855e-02 1.9993899762630463e-01 + <_> + + 0 -1 1441 -2.1740999072790146e-02 + + -8.5247898101806641e-01 4.9169998615980148e-02 + <_> + + 0 -1 1442 -1.7869999632239342e-02 + + -5.9985999017953873e-02 1.5222500264644623e-01 + <_> + + 0 -1 1443 -2.4831000715494156e-02 + + 3.5603401064872742e-01 -2.6259899139404297e-01 + <_> + + 0 -1 1444 1.5715500712394714e-01 + + 1.5599999460391700e-04 1.0428730249404907e+00 + <_> + + 0 -1 1445 6.9026999175548553e-02 + + -3.3006999641656876e-02 -1.1796669960021973e+00 + <_> + + 0 -1 1446 -1.1021999642252922e-02 + + 5.8987700939178467e-01 -5.7647999376058578e-02 + <_> + + 0 -1 1447 -1.3834999874234200e-02 + + 5.9502798318862915e-01 -2.4418599903583527e-01 + <_> + + 0 -1 1448 -3.0941000208258629e-02 + + -1.1723799705505371e+00 1.6907000541687012e-01 + <_> + + 0 -1 1449 2.1258000284433365e-02 + + -1.8900999799370766e-02 -1.0684759616851807e+00 + <_> + + 0 -1 1450 9.3079999089241028e-02 + + 1.6305600106716156e-01 -1.3375270366668701e+00 + <_> + + 0 -1 1451 2.9635999351739883e-02 + + -2.2524799406528473e-01 4.5400100946426392e-01 + <_> + + 0 -1 1452 -1.2199999764561653e-04 + + 2.7409100532531738e-01 -3.7371399998664856e-01 + <_> + + 0 -1 1453 -4.2098000645637512e-02 + + -7.5828802585601807e-01 1.7137000337243080e-02 + <_> + + 0 -1 1454 -2.2505000233650208e-02 + + -2.2759300470352173e-01 2.3698699474334717e-01 + <_> + + 0 -1 1455 -1.2862999923527241e-02 + + 1.9252400100231171e-01 -3.2127100229263306e-01 + <_> + + 0 -1 1456 
2.7860000729560852e-02 + + 1.6723699867725372e-01 -1.0209059715270996e+00 + <_> + + 0 -1 1457 -2.7807999402284622e-02 + + 1.2824759483337402e+00 -1.7225299775600433e-01 + <_> + + 0 -1 1458 -6.1630001291632652e-03 + + -5.4072898626327515e-01 2.3885700106620789e-01 + <_> + + 0 -1 1459 -2.0436000078916550e-02 + + 6.3355398178100586e-01 -2.1090599894523621e-01 + <_> + + 0 -1 1460 -1.2307999655604362e-02 + + -4.9778199195861816e-01 1.7402599751949310e-01 + <_> + + 0 -1 1461 -4.0493998676538467e-02 + + -1.1848740577697754e+00 -3.3890999853610992e-02 + <_> + + 0 -1 1462 2.9657000675797462e-02 + + 2.1740999072790146e-02 1.0069919824600220e+00 + <_> + + 0 -1 1463 6.8379999138414860e-03 + + 2.9217999428510666e-02 -5.9906297922134399e-01 + <_> + + 0 -1 1464 1.6164999455213547e-02 + + -2.1000799536705017e-01 3.7637299299240112e-01 + <_> + + 0 -1 1465 5.0193000584840775e-02 + + 2.5319999549537897e-03 -7.1668201684951782e-01 + <_> + + 0 -1 1466 1.9680000841617584e-03 + + -2.1921400725841522e-01 3.2298699021339417e-01 + <_> + + 0 -1 1467 2.4979999288916588e-02 + + -9.6840001642704010e-03 -7.7572900056838989e-01 + <_> + + 0 -1 1468 -1.5809999778866768e-02 + + 4.4637501239776611e-01 -6.1760000884532928e-02 + <_> + + 0 -1 1469 3.7206999957561493e-02 + + -2.0495399832725525e-01 5.7722198963165283e-01 + <_> + + 0 -1 1470 -7.9264998435974121e-02 + + -7.6745402812957764e-01 1.2550400197505951e-01 + <_> + + 0 -1 1471 -1.7152000218629837e-02 + + -1.4121830463409424e+00 -5.1704000681638718e-02 + <_> + + 0 -1 1472 3.2740000635385513e-02 + + 1.9334000349044800e-01 -6.3633698225021362e-01 + <_> + + 0 -1 1473 -1.1756999790668488e-01 + + 8.4325402975082397e-01 -1.8018600344657898e-01 + <_> + + 0 -1 1474 1.2057200074195862e-01 + + 1.2530000507831573e-01 -2.1213600635528564e+00 + <_> + + 0 -1 1475 4.2779999785125256e-03 + + -4.6604400873184204e-01 8.9643999934196472e-02 + <_> + + 0 -1 1476 -7.2544999420642853e-02 + + 5.1826500892639160e-01 1.6823999583721161e-02 + <_> + + 0 -1 1477 1.7710599303245544e-01 + + -3.0910000205039978e-02 -1.1046639680862427e+00 + <_> + + 0 -1 1478 8.4229996427893639e-03 + + 2.4445800483226776e-01 -3.8613098859786987e-01 + <_> + + 0 -1 1479 -1.3035000301897526e-02 + + 9.8004400730133057e-01 -1.7016500234603882e-01 + <_> + + 0 -1 1480 1.8912000581622124e-02 + + 2.0248499512672424e-01 -3.8545900583267212e-01 + <_> + + 0 -1 1481 2.1447999402880669e-02 + + -2.5717198848724365e-01 3.5181200504302979e-01 + <_> + + 0 -1 1482 6.3357003033161163e-02 + + 1.6994799673557281e-01 -9.1383802890777588e-01 + <_> + + 0 -1 1483 -3.2435998320579529e-02 + + -8.5681599378585815e-01 -2.1680999547243118e-02 + <_> + + 0 -1 1484 -2.3564999923110008e-02 + + 5.6115597486495972e-01 -2.2400000307243317e-04 + <_> + + 0 -1 1485 1.8789000809192657e-02 + + -2.5459799170494080e-01 3.4512901306152344e-01 + <_> + + 0 -1 1486 3.1042000278830528e-02 + + 7.5719999149441719e-03 3.4800198674201965e-01 + <_> + + 0 -1 1487 -1.1226999573409557e-02 + + -6.0219800472259521e-01 4.2814999818801880e-02 + <_> + + 0 -1 1488 -1.2845999561250210e-02 + + 4.2020401358604431e-01 -5.3801000118255615e-02 + <_> + + 0 -1 1489 -1.2791999615728855e-02 + + 2.2724500298500061e-01 -3.2398000359535217e-01 + <_> + + 0 -1 1490 6.8651996552944183e-02 + + 9.3532003462314606e-02 10. 
+ <_> + + 0 -1 1491 5.2789999172091484e-03 + + -2.6926299929618835e-01 3.3303201198577881e-01 + <_> + + 0 -1 1492 -3.8779001682996750e-02 + + -7.2365301847457886e-01 1.7806500196456909e-01 + <_> + + 0 -1 1493 6.1820000410079956e-03 + + -3.5119399428367615e-01 1.6586300730705261e-01 + <_> + + 0 -1 1494 1.7515200376510620e-01 + + 1.1623100191354752e-01 -1.5419290065765381e+00 + <_> + + 0 -1 1495 1.1627999693155289e-01 + + -9.1479998081922531e-03 -9.9842602014541626e-01 + <_> + + 0 -1 1496 -2.2964000701904297e-02 + + 2.0565399527549744e-01 1.5432000160217285e-02 + <_> + + 0 -1 1497 -5.1410000771284103e-02 + + 5.8072400093078613e-01 -2.0118400454521179e-01 + <_> + + 0 -1 1498 2.2474199533462524e-01 + + 1.8728999421000481e-02 1.0829299688339233e+00 + <_> + + 0 -1 1499 9.4860000535845757e-03 + + -3.3171299099922180e-01 1.9902999699115753e-01 + <_> + + 0 -1 1500 -1.1846300214529037e-01 + + 1.3711010217666626e+00 6.8926997482776642e-02 + <_> + + 0 -1 1501 3.7810999900102615e-02 + + -9.3600002583116293e-04 -8.3996999263763428e-01 + <_> + + 0 -1 1502 2.2202000021934509e-02 + + -1.1963999830186367e-02 3.6673998832702637e-01 + <_> + + 0 -1 1503 -3.6366000771522522e-02 + + 3.7866500020027161e-01 -2.7714800834655762e-01 + <_> + + 0 -1 1504 -1.3184699416160583e-01 + + -2.7481179237365723e+00 1.0666900128126144e-01 + <_> + + 0 -1 1505 -4.1655998677015305e-02 + + 4.7524300217628479e-01 -2.3249800503253937e-01 + <_> + + 0 -1 1506 -3.3151999115943909e-02 + + -5.7929402589797974e-01 1.7434400320053101e-01 + <_> + + 0 -1 1507 1.5769999474287033e-02 + + -1.1284000240266323e-02 -8.3701401948928833e-01 + <_> + + 0 -1 1508 -3.9363000541925430e-02 + + 3.4821599721908569e-01 -1.7455400526523590e-01 + <_> + + 0 -1 1509 -6.7849002778530121e-02 + + 1.4225699901580811e+00 -1.4765599370002747e-01 + <_> + + 0 -1 1510 -2.6775000616908073e-02 + + 2.3947000503540039e-01 1.3271999545395374e-02 + <_> + + 0 -1 1511 3.9919000118970871e-02 + + -8.9999996125698090e-03 -7.5938898324966431e-01 + <_> + + 0 -1 1512 1.0065600275993347e-01 + + -1.8685000017285347e-02 7.6245301961898804e-01 + <_> + + 0 -1 1513 -8.1022001802921295e-02 + + -9.0439099073410034e-01 -8.5880002006888390e-03 + <_> + + 0 -1 1514 -2.1258000284433365e-02 + + -2.1319599449634552e-01 2.1919700503349304e-01 + <_> + + 0 -1 1515 -1.0630999691784382e-02 + + 1.9598099589347839e-01 -3.5768100619316101e-01 + <_> + + 0 -1 1516 8.1300002057105303e-04 + + -9.2794999480247498e-02 2.6145899295806885e-01 + <_> + + 0 -1 1517 3.4650000743567944e-03 + + -5.5336099863052368e-01 2.7386000379920006e-02 + <_> + + 0 -1 1518 1.8835999071598053e-02 + + 1.8446099758148193e-01 -6.6934299468994141e-01 + <_> + + 0 -1 1519 -2.5631999596953392e-02 + + 1.9382879734039307e+00 -1.4708900451660156e-01 + <_> + + 0 -1 1520 -4.0939999744296074e-03 + + -2.6451599597930908e-01 2.0733200013637543e-01 + <_> + + 0 -1 1521 -8.9199998183175921e-04 + + -5.5031597614288330e-01 5.0374999642372131e-02 + <_> + + 0 -1 1522 -4.9518000334501266e-02 + + -2.5615389347076416e+00 1.3141700625419617e-01 + <_> + + 0 -1 1523 1.1680999770760536e-02 + + -2.4819800257682800e-01 3.9982700347900391e-01 + <_> + + 0 -1 1524 3.4563999623060226e-02 + + 1.6178800165653229e-01 -7.1418899297714233e-01 + <_> + + 0 -1 1525 -8.2909995689988136e-03 + + 2.2180099785327911e-01 -2.9181700944900513e-01 + <_> + + 0 -1 1526 -2.2358000278472900e-02 + + 3.1044098734855652e-01 -2.7280000504106283e-03 + <_> + + 0 -1 1527 -3.0801000073552132e-02 + + -9.5672702789306641e-01 -8.3400001749396324e-03 + <_> + + 0 -1 1528 4.3779000639915466e-02 + + 
1.2556900084018707e-01 -1.1759619712829590e+00 + <_> + + 0 -1 1529 4.3046001344919205e-02 + + -5.8876998722553253e-02 -1.8568470478057861e+00 + <_> + + 0 -1 1530 2.7188999578356743e-02 + + 4.2858000844717026e-02 3.9036700129508972e-01 + <_> + + 0 -1 1531 9.4149997457861900e-03 + + -4.3567001819610596e-02 -1.1094470024108887e+00 + <_> + + 0 -1 1532 9.4311997294425964e-02 + + 4.0256999433040619e-02 9.8442298173904419e-01 + <_> + + 0 -1 1533 1.7025099694728851e-01 + + 2.9510000720620155e-02 -6.9509297609329224e-01 + <_> + + 0 -1 1534 -4.7148000448942184e-02 + + 1.0338569879531860e+00 6.7602001130580902e-02 + <_> + + 0 -1 1535 1.1186300218105316e-01 + + -6.8682998418807983e-02 -2.4985830783843994e+00 + <_> + + 0 -1 1536 -1.4353999868035316e-02 + + -5.9481900930404663e-01 1.5001699328422546e-01 + <_> + + 0 -1 1537 3.4024000167846680e-02 + + -6.4823001623153687e-02 -2.1382639408111572e+00 + <_> + + 0 -1 1538 2.1601999178528786e-02 + + 5.5309999734163284e-02 7.8292900323867798e-01 + <_> + + 0 -1 1539 2.1771999076008797e-02 + + -7.1279997937381268e-03 -7.2148102521896362e-01 + <_> + + 0 -1 1540 8.2416996359825134e-02 + + 1.4609499275684357e-01 -1.3636670112609863e+00 + <_> + + 0 -1 1541 8.4671996533870697e-02 + + -1.7784699797630310e-01 7.2857701778411865e-01 + <_> + + 0 -1 1542 -5.5128000676631927e-02 + + -5.9402400255203247e-01 1.9357800483703613e-01 + <_> + + 0 -1 1543 -6.4823001623153687e-02 + + -1.0783840417861938e+00 -4.0734000504016876e-02 + <_> + + 0 -1 1544 -2.2769000381231308e-02 + + 7.7900201082229614e-01 3.4960000775754452e-03 + <_> + + 0 -1 1545 5.4756000638008118e-02 + + -6.5683998167514801e-02 -1.8188409805297852e+00 + <_> + + 0 -1 1546 -8.9000001025851816e-05 + + -1.7891999334096909e-02 2.0768299698829651e-01 + <_> + + 0 -1 1547 9.8361998796463013e-02 + + -5.5946998298168182e-02 -1.4153920412063599e+00 + <_> + + 0 -1 1548 -7.0930002257227898e-03 + + 3.4135299921035767e-01 -1.2089899927377701e-01 + <_> + + 0 -1 1549 5.0278000533580780e-02 + + -2.6286700367927551e-01 2.5797298550605774e-01 + <_> + + 0 -1 1550 -5.7870000600814819e-03 + + -1.3178600370883942e-01 1.7350199818611145e-01 + <_> + + 0 -1 1551 1.3973999768495560e-02 + + 2.8518000617623329e-02 -6.1152201890945435e-01 + <_> + + 0 -1 1552 2.1449999883770943e-02 + + 2.6181999593973160e-02 3.0306598544120789e-01 + <_> + + 0 -1 1553 -2.9214000329375267e-02 + + 4.4940599799156189e-01 -2.2803099453449249e-01 + <_> + + 0 -1 1554 4.8099999548867345e-04 + + -1.9879999756813049e-01 2.0744499564170837e-01 + <_> + + 0 -1 1555 1.7109999898821115e-03 + + -5.4037201404571533e-01 6.7865997552871704e-02 + <_> + + 0 -1 1556 8.6660003289580345e-03 + + -1.3128000311553478e-02 5.2297902107238770e-01 + <_> + + 0 -1 1557 6.3657999038696289e-02 + + 6.8299002945423126e-02 -4.9235099554061890e-01 + <_> + + 0 -1 1558 -2.7968000620603561e-02 + + 6.8183898925781250e-01 7.8781001269817352e-02 + <_> + + 0 -1 1559 4.8953998833894730e-02 + + -2.0622399449348450e-01 5.0388097763061523e-01 + <_> + 169 + -3.2396929264068604e+00 + + <_> + + 0 -1 1560 -2.9312999919056892e-02 + + 7.1284699440002441e-01 -5.8230698108673096e-01 + <_> + + 0 -1 1561 1.2415099889039993e-01 + + -3.6863499879837036e-01 6.0067200660705566e-01 + <_> + + 0 -1 1562 7.9349996522068977e-03 + + -8.6008298397064209e-01 2.1724699437618256e-01 + <_> + + 0 -1 1563 3.0365999788045883e-02 + + -2.7186998724937439e-01 6.1247897148132324e-01 + <_> + + 0 -1 1564 2.5218000635504723e-02 + + -3.4748300909996033e-01 5.0427699089050293e-01 + <_> + + 0 -1 1565 1.0014000348746777e-02 + + -3.1898999214172363e-01 
4.1376799345016479e-01 + <_> + + 0 -1 1566 -1.6775000840425491e-02 + + -6.9048100709915161e-01 9.4830997288227081e-02 + <_> + + 0 -1 1567 -2.6950000319629908e-03 + + -2.0829799771308899e-01 2.3737199604511261e-01 + <_> + + 0 -1 1568 4.2257998138666153e-02 + + -4.9366700649261475e-01 1.8170599639415741e-01 + <_> + + 0 -1 1569 -4.8505000770092010e-02 + + 1.3429640531539917e+00 3.9769001305103302e-02 + <_> + + 0 -1 1570 2.8992999345064163e-02 + + 4.6496000140905380e-02 -8.1643497943878174e-01 + <_> + + 0 -1 1571 -4.0089000016450882e-02 + + -7.1197801828384399e-01 2.2553899884223938e-01 + <_> + + 0 -1 1572 -4.1021998971700668e-02 + + 1.0057929754257202e+00 -1.9690200686454773e-01 + <_> + + 0 -1 1573 1.1838000267744064e-02 + + -1.2600000016391277e-02 8.0767101049423218e-01 + <_> + + 0 -1 1574 -2.1328000351786613e-02 + + -8.2023900747299194e-01 2.0524999126791954e-02 + <_> + + 0 -1 1575 -2.3904999718070030e-02 + + 5.4210501909255981e-01 -7.4767000973224640e-02 + <_> + + 0 -1 1576 1.8008999526500702e-02 + + -3.3827701210975647e-01 4.2358601093292236e-01 + <_> + + 0 -1 1577 -4.3614000082015991e-02 + + -1.1983489990234375e+00 1.5566200017929077e-01 + <_> + + 0 -1 1578 -9.2449998483061790e-03 + + -8.9029997587203979e-01 1.1003999970853329e-02 + <_> + + 0 -1 1579 4.7485001385211945e-02 + + 1.6664099693298340e-01 -9.0764498710632324e-01 + <_> + + 0 -1 1580 -1.4233999885618687e-02 + + 6.2695199251174927e-01 -2.5791200995445251e-01 + <_> + + 0 -1 1581 3.8010000716894865e-03 + + -2.8229999542236328e-01 2.6624599099159241e-01 + <_> + + 0 -1 1582 3.4330000635236502e-03 + + -6.3771998882293701e-01 9.8422996699810028e-02 + <_> + + 0 -1 1583 -2.9221000149846077e-02 + + -7.6769900321960449e-01 2.2634500265121460e-01 + <_> + + 0 -1 1584 -6.4949998632073402e-03 + + 4.5600101351737976e-01 -2.6528900861740112e-01 + <_> + + 0 -1 1585 -3.0034000054001808e-02 + + -7.6551097631454468e-01 1.4009299874305725e-01 + <_> + + 0 -1 1586 7.8360000625252724e-03 + + 4.6755999326705933e-02 -7.2356200218200684e-01 + <_> + + 0 -1 1587 8.8550001382827759e-03 + + -4.9141999334096909e-02 5.1472699642181396e-01 + <_> + + 0 -1 1588 9.5973998308181763e-02 + + -2.0068999379873276e-02 -1.0850950479507446e+00 + <_> + + 0 -1 1589 -3.2876998186111450e-02 + + -9.5875298976898193e-01 1.4543600380420685e-01 + <_> + + 0 -1 1590 -1.3384000398218632e-02 + + -7.0013600587844849e-01 2.9157999902963638e-02 + <_> + + 0 -1 1591 1.5235999599099159e-02 + + -2.8235700726509094e-01 2.5367999076843262e-01 + <_> + + 0 -1 1592 1.2054000049829483e-02 + + -2.5303399562835693e-01 4.6526700258255005e-01 + <_> + + 0 -1 1593 -7.6295003294944763e-02 + + -6.9915801286697388e-01 1.3217200338840485e-01 + <_> + + 0 -1 1594 -1.2040000408887863e-02 + + 4.5894598960876465e-01 -2.3856499791145325e-01 + <_> + + 0 -1 1595 2.1916000172495842e-02 + + 1.8268600106239319e-01 -6.1629700660705566e-01 + <_> + + 0 -1 1596 -2.7330000884830952e-03 + + -6.3257902860641479e-01 3.4219000488519669e-02 + <_> + + 0 -1 1597 -4.8652000725269318e-02 + + -1.0297729969024658e+00 1.7386500537395477e-01 + <_> + + 0 -1 1598 -1.0463999584317207e-02 + + 3.4757301211357117e-01 -2.7464100718498230e-01 + <_> + + 0 -1 1599 -6.6550001502037048e-03 + + -2.8980299830436707e-01 2.4037900567054749e-01 + <_> + + 0 -1 1600 8.5469996556639671e-03 + + -4.4340500235557556e-01 1.4267399907112122e-01 + <_> + + 0 -1 1601 1.9913999363780022e-02 + + 1.7740400135517120e-01 -2.4096299707889557e-01 + <_> + + 0 -1 1602 2.2012999281287193e-02 + + -1.0812000371515751e-02 -9.4690799713134766e-01 + <_> + + 0 -1 1603 
-5.2179001271724701e-02 + + 1.6547499895095825e+00 9.6487000584602356e-02 + <_> + + 0 -1 1604 1.9698999822139740e-02 + + -6.7560002207756042e-03 -8.6311501264572144e-01 + <_> + + 0 -1 1605 2.3040000349283218e-02 + + -2.3519999813288450e-03 3.8531300425529480e-01 + <_> + + 0 -1 1606 -1.5038000419735909e-02 + + -6.1905699968338013e-01 3.1077999621629715e-02 + <_> + + 0 -1 1607 -4.9956001341342926e-02 + + 7.0657497644424438e-01 4.7880999743938446e-02 + <_> + + 0 -1 1608 -6.9269999861717224e-02 + + 3.9212900400161743e-01 -2.3848000168800354e-01 + <_> + + 0 -1 1609 4.7399997711181641e-03 + + -2.4309000000357628e-02 2.5386300683021545e-01 + <_> + + 0 -1 1610 -3.3923998475074768e-02 + + 4.6930399537086487e-01 -2.3321899771690369e-01 + <_> + + 0 -1 1611 -1.6231000423431396e-02 + + 3.2319200038909912e-01 -2.0545600354671478e-01 + <_> + + 0 -1 1612 -5.0193000584840775e-02 + + -1.2277870178222656e+00 -4.0798000991344452e-02 + <_> + + 0 -1 1613 5.6944001466035843e-02 + + 4.5184001326560974e-02 6.0197502374649048e-01 + <_> + + 0 -1 1614 4.0936999022960663e-02 + + -1.6772800683975220e-01 8.9819300174713135e-01 + <_> + + 0 -1 1615 -3.0839999672025442e-03 + + 3.3716198801994324e-01 -2.7240800857543945e-01 + <_> + + 0 -1 1616 -3.2600000500679016e-02 + + -8.5446500778198242e-01 1.9664999097585678e-02 + <_> + + 0 -1 1617 9.8480999469757080e-02 + + 5.4742000997066498e-02 6.3827300071716309e-01 + <_> + + 0 -1 1618 -3.8185000419616699e-02 + + 5.2274698019027710e-01 -2.3384800553321838e-01 + <_> + + 0 -1 1619 -4.5917000621557236e-02 + + 6.2829202413558960e-01 3.2859001308679581e-02 + <_> + + 0 -1 1620 -1.1955499649047852e-01 + + -6.1572700738906860e-01 3.4680001437664032e-02 + <_> + + 0 -1 1621 -1.2044399976730347e-01 + + -8.4380000829696655e-01 1.6530700027942657e-01 + <_> + + 0 -1 1622 7.0619001984596252e-02 + + -6.3261002302169800e-02 -1.9863929748535156e+00 + <_> + + 0 -1 1623 8.4889996796846390e-03 + + -1.7663399875164032e-01 3.8011199235916138e-01 + <_> + + 0 -1 1624 2.2710999473929405e-02 + + -2.7605999261140823e-02 -9.1921401023864746e-01 + <_> + + 0 -1 1625 4.9700000090524554e-04 + + -2.4293200671672821e-01 2.2878900170326233e-01 + <_> + + 0 -1 1626 3.4651998430490494e-02 + + -2.3705999553203583e-01 5.4010999202728271e-01 + <_> + + 0 -1 1627 -4.4700000435113907e-03 + + 3.9078998565673828e-01 -1.2693800032138824e-01 + <_> + + 0 -1 1628 2.3643000051379204e-02 + + -2.6663699746131897e-01 3.2312598824501038e-01 + <_> + + 0 -1 1629 1.2813000008463860e-02 + + 1.7540800571441650e-01 -6.0787999629974365e-01 + <_> + + 0 -1 1630 -1.1250999756157398e-02 + + -1.0852589607238770e+00 -2.8046000748872757e-02 + <_> + + 0 -1 1631 -4.1535001248121262e-02 + + 7.1887397766113281e-01 2.7982000261545181e-02 + <_> + + 0 -1 1632 -9.3470998108386993e-02 + + -1.1906319856643677e+00 -4.4810999184846878e-02 + <_> + + 0 -1 1633 -2.7249999344348907e-02 + + 6.2942498922348022e-01 9.5039997249841690e-03 + <_> + + 0 -1 1634 -2.1759999915957451e-02 + + 1.3233649730682373e+00 -1.5027000010013580e-01 + <_> + + 0 -1 1635 -9.6890004351735115e-03 + + -3.3947101235389709e-01 1.7085799574851990e-01 + <_> + + 0 -1 1636 6.9395996630191803e-02 + + -2.5657799839973450e-01 4.7652098536491394e-01 + <_> + + 0 -1 1637 3.1208999454975128e-02 + + 1.4154000580310822e-01 -3.4942001104354858e-01 + <_> + + 0 -1 1638 -4.9727000296115875e-02 + + -1.1675560474395752e+00 -4.0757998824119568e-02 + <_> + + 0 -1 1639 -2.0301999524235725e-02 + + -3.9486399292945862e-01 1.5814900398254395e-01 + <_> + + 0 -1 1640 -1.5367000363767147e-02 + + 4.9300000071525574e-01 
-2.0092099905014038e-01 + <_> + + 0 -1 1641 -5.0735000520944595e-02 + + 1.8736059665679932e+00 8.6730003356933594e-02 + <_> + + 0 -1 1642 -2.0726000890135765e-02 + + -8.8938397169113159e-01 -7.3199998587369919e-03 + <_> + + 0 -1 1643 -3.0993999913334846e-02 + + -1.1664899587631226e+00 1.4274600148200989e-01 + <_> + + 0 -1 1644 -4.4269999489188194e-03 + + -6.6815102100372314e-01 4.4120000675320625e-03 + <_> + + 0 -1 1645 -4.5743998140096664e-02 + + -4.7955200076103210e-01 1.5121999382972717e-01 + <_> + + 0 -1 1646 1.6698999330401421e-02 + + 1.2048599869012833e-01 -4.5235899090766907e-01 + <_> + + 0 -1 1647 3.2210000790655613e-03 + + -7.7615000307559967e-02 2.7846598625183105e-01 + <_> + + 0 -1 1648 2.4434000253677368e-02 + + -1.9987100362777710e-01 6.7253702878952026e-01 + <_> + + 0 -1 1649 -7.9677999019622803e-02 + + 9.2222398519515991e-01 9.2557996511459351e-02 + <_> + + 0 -1 1650 4.4530000537633896e-02 + + -2.6690500974655151e-01 3.3320501446723938e-01 + <_> + + 0 -1 1651 -1.2528300285339355e-01 + + -5.4253101348876953e-01 1.3976299762725830e-01 + <_> + + 0 -1 1652 1.7971999943256378e-02 + + 1.8219999969005585e-02 -6.8048501014709473e-01 + <_> + + 0 -1 1653 1.9184000790119171e-02 + + -1.2583999894559383e-02 5.4126697778701782e-01 + <_> + + 0 -1 1654 4.0024001151323318e-02 + + -1.7638799548149109e-01 7.8810399770736694e-01 + <_> + + 0 -1 1655 1.3558999635279179e-02 + + 2.0737600326538086e-01 -4.7744300961494446e-01 + <_> + + 0 -1 1656 1.6220999881625175e-02 + + 2.3076999932527542e-02 -6.1182099580764771e-01 + <_> + + 0 -1 1657 1.1229000054299831e-02 + + -1.7728000879287720e-02 4.1764199733734131e-01 + <_> + + 0 -1 1658 3.9193000644445419e-02 + + -1.8948499858379364e-01 7.4019300937652588e-01 + <_> + + 0 -1 1659 -9.5539996400475502e-03 + + 4.0947100520133972e-01 -1.3508899509906769e-01 + <_> + + 0 -1 1660 2.7878999710083008e-02 + + -2.0350700616836548e-01 6.1625397205352783e-01 + <_> + + 0 -1 1661 -2.3600999265909195e-02 + + -1.6967060565948486e+00 1.4633199572563171e-01 + <_> + + 0 -1 1662 2.6930000633001328e-02 + + -3.0401999130845070e-02 -1.0909470319747925e+00 + <_> + + 0 -1 1663 2.8999999631196260e-04 + + -2.0076000690460205e-01 2.2314099967479706e-01 + <_> + + 0 -1 1664 -4.1124999523162842e-02 + + -4.5242199301719666e-01 5.7392001152038574e-02 + <_> + + 0 -1 1665 6.6789998672902584e-03 + + 2.3824900388717651e-01 -2.1262100338935852e-01 + <_> + + 0 -1 1666 4.7864999622106552e-02 + + -1.8194800615310669e-01 6.1918401718139648e-01 + <_> + + 0 -1 1667 -3.1679999083280563e-03 + + -2.7393200993537903e-01 2.5017300248146057e-01 + <_> + + 0 -1 1668 -8.6230002343654633e-03 + + -4.6280300617218018e-01 4.2397998273372650e-02 + <_> + + 0 -1 1669 -7.4350000359117985e-03 + + 4.1796800494194031e-01 -1.7079999670386314e-03 + <_> + + 0 -1 1670 -1.8769999733194709e-03 + + 1.4602300524711609e-01 -3.3721101284027100e-01 + <_> + + 0 -1 1671 -8.6226001381874084e-02 + + 7.5143402814865112e-01 1.0711999610066414e-02 + <_> + + 0 -1 1672 4.6833999454975128e-02 + + -1.9119599461555481e-01 4.8414900898933411e-01 + <_> + + 0 -1 1673 -9.2000002041459084e-05 + + 3.5220399498939514e-01 -1.7333300411701202e-01 + <_> + + 0 -1 1674 -1.6343999654054642e-02 + + -6.4397698640823364e-01 9.0680001303553581e-03 + <_> + + 0 -1 1675 4.5703999698162079e-02 + + 1.8216000869870186e-02 3.1970798969268799e-01 + <_> + + 0 -1 1676 -2.7382999658584595e-02 + + 1.0564049482345581e+00 -1.7276400327682495e-01 + <_> + + 0 -1 1677 -2.7602000162005424e-02 + + 2.9715499281883240e-01 -9.4600003212690353e-03 + <_> + + 0 -1 1678 
7.6939999125897884e-03 + + -2.1660299599170685e-01 4.7385200858116150e-01 + <_> + + 0 -1 1679 -7.0500001311302185e-04 + + 2.4048799276351929e-01 -2.6776000857353210e-01 + <_> + + 0 -1 1680 1.1054199934005737e-01 + + -3.3539000898599625e-02 -1.0233880281448364e+00 + <_> + + 0 -1 1681 6.8765997886657715e-02 + + -4.3239998631179333e-03 5.7153397798538208e-01 + <_> + + 0 -1 1682 1.7999999690800905e-03 + + 7.7574998140335083e-02 -4.2092698812484741e-01 + <_> + + 0 -1 1683 1.9232000410556793e-01 + + 8.2021996378898621e-02 2.8810169696807861e+00 + <_> + + 0 -1 1684 1.5742099285125732e-01 + + -1.3708199560642242e-01 2.0890059471130371e+00 + <_> + + 0 -1 1685 -4.9387000501155853e-02 + + -1.8610910177230835e+00 1.4332099258899689e-01 + <_> + + 0 -1 1686 5.1929000765085220e-02 + + -1.8737000226974487e-01 5.4231601953506470e-01 + <_> + + 0 -1 1687 4.9965001642704010e-02 + + 1.4175300300121307e-01 -1.5625779628753662e+00 + <_> + + 0 -1 1688 -4.2633000761270523e-02 + + 1.6059479713439941e+00 -1.4712899923324585e-01 + <_> + + 0 -1 1689 -3.7553999572992325e-02 + + -8.0974900722503662e-01 1.3256999850273132e-01 + <_> + + 0 -1 1690 -3.7174999713897705e-02 + + -1.3945020437240601e+00 -5.7055000215768814e-02 + <_> + + 0 -1 1691 1.3945999555289745e-02 + + 3.3427000045776367e-02 5.7474797964096069e-01 + <_> + + 0 -1 1692 -4.4800000614486635e-04 + + -5.5327498912811279e-01 2.1952999755740166e-02 + <_> + + 0 -1 1693 3.1993001699447632e-02 + + 2.0340999588370323e-02 3.7459200620651245e-01 + <_> + + 0 -1 1694 -4.2799999937415123e-03 + + 4.4428700208663940e-01 -2.2999699413776398e-01 + <_> + + 0 -1 1695 9.8550003021955490e-03 + + 1.8315799534320831e-01 -4.0964999794960022e-01 + <_> + + 0 -1 1696 9.3356996774673462e-02 + + -6.3661001622676849e-02 -1.6929290294647217e+00 + <_> + + 0 -1 1697 1.7209999263286591e-02 + + 2.0153899490833282e-01 -4.6061098575592041e-01 + <_> + + 0 -1 1698 8.4319999441504478e-03 + + -3.2003998756408691e-01 1.5312199294567108e-01 + <_> + + 0 -1 1699 -1.4054999686777592e-02 + + 8.6882400512695312e-01 3.2575000077486038e-02 + <_> + + 0 -1 1700 -7.7180000953376293e-03 + + 6.3686698675155640e-01 -1.8425500392913818e-01 + <_> + + 0 -1 1701 2.8005000203847885e-02 + + 1.7357499897480011e-01 -4.7883599996566772e-01 + <_> + + 0 -1 1702 -1.8884999677538872e-02 + + 2.4101600050926208e-01 -2.6547598838806152e-01 + <_> + + 0 -1 1703 -1.8585000187158585e-02 + + 5.4232501983642578e-01 5.3633000701665878e-02 + <_> + + 0 -1 1704 -3.6437001079320908e-02 + + 2.3908898830413818e+00 -1.3634699583053589e-01 + <_> + + 0 -1 1705 3.2455001026391983e-02 + + 1.5910699963569641e-01 -6.7581498622894287e-01 + <_> + + 0 -1 1706 5.9781998395919800e-02 + + -2.3479999508708715e-03 -7.3053699731826782e-01 + <_> + + 0 -1 1707 9.8209995776414871e-03 + + -1.1444099992513657e-01 3.0570301413536072e-01 + <_> + + 0 -1 1708 -3.5163998603820801e-02 + + -1.0511469841003418e+00 -3.3103000372648239e-02 + <_> + + 0 -1 1709 2.7429999317973852e-03 + + -2.0135399699211121e-01 3.2754099369049072e-01 + <_> + + 0 -1 1710 8.1059997901320457e-03 + + -2.1383500099182129e-01 4.3362098932266235e-01 + <_> + + 0 -1 1711 8.8942997157573700e-02 + + 1.0940899699926376e-01 -4.7609338760375977e+00 + <_> + + 0 -1 1712 -3.0054999515414238e-02 + + -1.7169300317764282e+00 -6.0919001698493958e-02 + <_> + + 0 -1 1713 -2.1734999492764473e-02 + + 6.4778900146484375e-01 -3.2830998301506042e-02 + <_> + + 0 -1 1714 3.7648998200893402e-02 + + -1.0060000233352184e-02 -7.6569098234176636e-01 + <_> + + 0 -1 1715 2.7189999818801880e-03 + + 1.9888900220394135e-01 
-8.2479000091552734e-02 + <_> + + 0 -1 1716 -1.0548000223934650e-02 + + -8.6613601446151733e-01 -2.5986000895500183e-02 + <_> + + 0 -1 1717 1.2966300547122955e-01 + + 1.3911999762058258e-01 -2.2271950244903564e+00 + <_> + + 0 -1 1718 -1.7676999792456627e-02 + + 3.3967700600624084e-01 -2.3989599943161011e-01 + <_> + + 0 -1 1719 -7.7051997184753418e-02 + + -2.5017969608306885e+00 1.2841999530792236e-01 + <_> + + 0 -1 1720 -1.9230000674724579e-02 + + 5.0641202926635742e-01 -1.9751599431037903e-01 + <_> + + 0 -1 1721 -5.1222998648881912e-02 + + -2.9333369731903076e+00 1.3858500123023987e-01 + <_> + + 0 -1 1722 2.0830000285059214e-03 + + -6.0043597221374512e-01 2.9718000441789627e-02 + <_> + + 0 -1 1723 2.5418000295758247e-02 + + 3.3915799856185913e-01 -1.4392000436782837e-01 + <_> + + 0 -1 1724 -2.3905999958515167e-02 + + -1.1082680225372314e+00 -4.7377001494169235e-02 + <_> + + 0 -1 1725 -6.3740001060068607e-03 + + 4.4533699750900269e-01 -6.7052997648715973e-02 + <_> + + 0 -1 1726 -3.7698999047279358e-02 + + -1.0406579971313477e+00 -4.1790001094341278e-02 + <_> + + 0 -1 1727 2.1655100584030151e-01 + + 3.3863000571727753e-02 8.2017302513122559e-01 + <_> + + 0 -1 1728 -1.3400999829173088e-02 + + 5.2903497219085693e-01 -1.9133000075817108e-01 + <_> + 196 + -3.2103500366210938e+00 + + <_> + + 0 -1 1729 7.1268998086452484e-02 + + -5.3631198406219482e-01 6.0715299844741821e-01 + <_> + + 0 -1 1730 5.6111000478267670e-02 + + -5.0141602754592896e-01 4.3976101279258728e-01 + <_> + + 0 -1 1731 4.0463998913764954e-02 + + -3.2922199368476868e-01 5.4834699630737305e-01 + <_> + + 0 -1 1732 6.3155002892017365e-02 + + -3.1701698899269104e-01 4.6152999997138977e-01 + <_> + + 0 -1 1733 1.0320999659597874e-02 + + 1.0694999992847443e-01 -9.8243898153305054e-01 + <_> + + 0 -1 1734 6.2606997787952423e-02 + + -1.4329700171947479e-01 7.1095001697540283e-01 + <_> + + 0 -1 1735 -3.9416000247001648e-02 + + 9.4380199909210205e-01 -2.1572099626064301e-01 + <_> + + 0 -1 1736 -5.3960001096129417e-03 + + -5.4611998796463013e-01 2.5303798913955688e-01 + <_> + + 0 -1 1737 1.0773199796676636e-01 + + 1.2496000155806541e-02 -1.0809199810028076e+00 + <_> + + 0 -1 1738 1.6982000321149826e-02 + + -3.1536400318145752e-01 5.1239997148513794e-01 + <_> + + 0 -1 1739 3.1216999515891075e-02 + + -4.5199999585747719e-03 -1.2443480491638184e+00 + <_> + + 0 -1 1740 -2.3106999695301056e-02 + + -7.6492899656295776e-01 2.0640599727630615e-01 + <_> + + 0 -1 1741 -1.1203999631106853e-02 + + 2.4092699587345123e-01 -3.5142099857330322e-01 + <_> + + 0 -1 1742 -4.7479998320341110e-03 + + -9.7007997334003448e-02 2.0638099312782288e-01 + <_> + + 0 -1 1743 -1.7358999699354172e-02 + + -7.9020297527313232e-01 2.1852999925613403e-02 + <_> + + 0 -1 1744 1.8851999193429947e-02 + + -1.0394600033760071e-01 5.4844200611114502e-01 + <_> + + 0 -1 1745 7.2249998338520527e-03 + + -4.0409401059150696e-01 2.6763799786567688e-01 + <_> + + 0 -1 1746 1.8915999680757523e-02 + + 2.0508000254631042e-01 -1.0206340551376343e+00 + <_> + + 0 -1 1747 3.1156999990344048e-02 + + 1.2400000123307109e-03 -8.7293499708175659e-01 + <_> + + 0 -1 1748 2.0951999351382256e-02 + + -5.5559999309480190e-03 8.0356198549270630e-01 + <_> + + 0 -1 1749 1.1291000060737133e-02 + + -3.6478400230407715e-01 2.2767899930477142e-01 + <_> + + 0 -1 1750 -5.7011000812053680e-02 + + -1.4295619726181030e+00 1.4322000741958618e-01 + <_> + + 0 -1 1751 7.2194002568721771e-02 + + -4.1850000619888306e-02 -1.9111829996109009e+00 + <_> + + 0 -1 1752 -1.9874000921845436e-02 + + 2.6425498723983765e-01 
-3.2617700099945068e-01 + <_> + + 0 -1 1753 -1.6692999750375748e-02 + + -8.3907800912857056e-01 4.0799999260343611e-04 + <_> + + 0 -1 1754 -3.9834998548030853e-02 + + -4.8858499526977539e-01 1.6436100006103516e-01 + <_> + + 0 -1 1755 2.7009999379515648e-02 + + -1.8862499296665192e-01 8.3419400453567505e-01 + <_> + + 0 -1 1756 -3.9420002140104771e-03 + + 2.3231500387191772e-01 -7.2360001504421234e-02 + <_> + + 0 -1 1757 2.2833000868558884e-02 + + -3.5884000360965729e-02 -1.1549400091171265e+00 + <_> + + 0 -1 1758 -6.8888001143932343e-02 + + -1.7837309837341309e+00 1.5159000456333160e-01 + <_> + + 0 -1 1759 4.3097000569105148e-02 + + -2.1608099341392517e-01 5.0624102354049683e-01 + <_> + + 0 -1 1760 8.6239995434880257e-03 + + -1.7795599997043610e-01 2.8957900404930115e-01 + <_> + + 0 -1 1761 1.4561000280082226e-02 + + -1.1408000253140926e-02 -8.9402002096176147e-01 + <_> + + 0 -1 1762 -1.1501000262796879e-02 + + 3.0171999335289001e-01 -4.3659001588821411e-02 + <_> + + 0 -1 1763 -1.0971499979496002e-01 + + -9.5147097110748291e-01 -1.9973000511527061e-02 + <_> + + 0 -1 1764 4.5228000730276108e-02 + + 3.3110998570919037e-02 9.6619802713394165e-01 + <_> + + 0 -1 1765 -2.7047999203205109e-02 + + 9.7963601350784302e-01 -1.7261900007724762e-01 + <_> + + 0 -1 1766 1.8030999228358269e-02 + + -2.0801000297069550e-02 2.7385899424552917e-01 + <_> + + 0 -1 1767 5.0524998456239700e-02 + + -5.6802999228239059e-02 -1.7775089740753174e+00 + <_> + + 0 -1 1768 -2.9923999682068825e-02 + + 6.5329200029373169e-01 -2.3537000641226768e-02 + <_> + + 0 -1 1769 3.8058001548051834e-02 + + 2.6317000389099121e-02 -7.0665699243545532e-01 + <_> + + 0 -1 1770 1.8563899397850037e-01 + + -5.6039998307824135e-03 3.2873699069023132e-01 + <_> + + 0 -1 1771 -4.0670000016689301e-03 + + 3.4204798936843872e-01 -3.0171599984169006e-01 + <_> + + 0 -1 1772 1.0108999907970428e-02 + + -7.3600001633167267e-03 5.7981598377227783e-01 + <_> + + 0 -1 1773 -1.1567000299692154e-02 + + -5.2722197771072388e-01 4.6447999775409698e-02 + <_> + + 0 -1 1774 -6.5649999305605888e-03 + + -5.8529102802276611e-01 1.9101899862289429e-01 + <_> + + 0 -1 1775 1.0582000017166138e-02 + + 2.1073000505566597e-02 -6.8892598152160645e-01 + <_> + + 0 -1 1776 -2.0304000005125999e-02 + + -3.6400699615478516e-01 1.5338799357414246e-01 + <_> + + 0 -1 1777 2.3529999889433384e-03 + + 3.6164000630378723e-02 -5.9825098514556885e-01 + <_> + + 0 -1 1778 -1.4690000098198652e-03 + + -1.4707699418067932e-01 3.7507998943328857e-01 + <_> + + 0 -1 1779 8.6449999362230301e-03 + + -2.1708500385284424e-01 5.1936799287796021e-01 + <_> + + 0 -1 1780 -2.4326000362634659e-02 + + -1.0846769809722900e+00 1.4084799587726593e-01 + <_> + + 0 -1 1781 7.4418999254703522e-02 + + -1.5513800084590912e-01 1.1822769641876221e+00 + <_> + + 0 -1 1782 1.7077999189496040e-02 + + 4.4231001287698746e-02 9.1561102867126465e-01 + <_> + + 0 -1 1783 -2.4577999487519264e-02 + + -1.5504100322723389e+00 -5.4745998233556747e-02 + <_> + + 0 -1 1784 3.0205000191926956e-02 + + 1.6662800312042236e-01 -1.0001239776611328e+00 + <_> + + 0 -1 1785 1.2136000208556652e-02 + + -7.7079099416732788e-01 -4.8639997839927673e-03 + <_> + + 0 -1 1786 8.6717002093791962e-02 + + 1.1061699688434601e-01 -1.6857999563217163e+00 + <_> + + 0 -1 1787 -4.2309001088142395e-02 + + 1.1075930595397949e+00 -1.5438599884510040e-01 + <_> + + 0 -1 1788 -2.6420000940561295e-03 + + 2.7451899647712708e-01 -1.8456199765205383e-01 + <_> + + 0 -1 1789 -5.6662000715732574e-02 + + -8.0625599622726440e-01 -1.6928000375628471e-02 + <_> + + 0 -1 1790 
2.3475000634789467e-02 + + 1.4187699556350708e-01 -2.5500899553298950e-01 + <_> + + 0 -1 1791 -2.0803000777959824e-02 + + 1.9826300442218781e-01 -3.1171199679374695e-01 + <_> + + 0 -1 1792 7.2599998675286770e-03 + + -5.0590999424457550e-02 4.1923800110816956e-01 + <_> + + 0 -1 1793 3.4160000085830688e-01 + + -1.6674900054931641e-01 9.2748600244522095e-01 + <_> + + 0 -1 1794 6.2029999680817127e-03 + + -1.2625899910926819e-01 4.0445300936698914e-01 + <_> + + 0 -1 1795 3.2692000269889832e-02 + + -3.2634999603033066e-02 -9.8939800262451172e-01 + <_> + + 0 -1 1796 2.1100000594742596e-04 + + -6.4534001052379608e-02 2.5473698973655701e-01 + <_> + + 0 -1 1797 7.2100001852959394e-04 + + -3.6618599295616150e-01 1.1973100155591965e-01 + <_> + + 0 -1 1798 5.4490998387336731e-02 + + 1.2073499709367752e-01 -1.0291390419006348e+00 + <_> + + 0 -1 1799 -1.0141000151634216e-02 + + -5.2177202701568604e-01 3.3734999597072601e-02 + <_> + + 0 -1 1800 -1.8815999850630760e-02 + + 6.5181797742843628e-01 1.3399999588727951e-03 + <_> + + 0 -1 1801 -5.3480002097785473e-03 + + 1.7370699346065521e-01 -3.4132000803947449e-01 + <_> + + 0 -1 1802 -1.0847000405192375e-02 + + -1.9699899852275848e-01 1.5045499801635742e-01 + <_> + + 0 -1 1803 -4.9926001578569412e-02 + + -5.0888502597808838e-01 3.0762000009417534e-02 + <_> + + 0 -1 1804 1.2160000391304493e-02 + + -6.9251999258995056e-02 1.8745499849319458e-01 + <_> + + 0 -1 1805 -2.2189998999238014e-03 + + -4.0849098563194275e-01 7.9954996705055237e-02 + <_> + + 0 -1 1806 3.1580000650137663e-03 + + -2.1124599874019623e-01 2.2366400063037872e-01 + <_> + + 0 -1 1807 4.1439998894929886e-03 + + -4.9900299310684204e-01 6.2917001545429230e-02 + <_> + + 0 -1 1808 -7.3730000294744968e-03 + + -2.0553299784660339e-01 2.2096699476242065e-01 + <_> + + 0 -1 1809 5.1812000572681427e-02 + + 1.8096800148487091e-01 -4.3495801091194153e-01 + <_> + + 0 -1 1810 1.8340000882744789e-02 + + 1.5200000256299973e-02 3.7991699576377869e-01 + <_> + + 0 -1 1811 1.7490799725055695e-01 + + -2.0920799672603607e-01 4.0013000369071960e-01 + <_> + + 0 -1 1812 5.3993999958038330e-02 + + 2.4751600623130798e-01 -2.6712900400161743e-01 + <_> + + 0 -1 1813 -3.2033199071884155e-01 + + -1.9094380140304565e+00 -6.6960997879505157e-02 + <_> + + 0 -1 1814 -2.7060000225901604e-02 + + -7.1371299028396606e-01 1.5904599428176880e-01 + <_> + + 0 -1 1815 7.7463999390602112e-02 + + -1.6970199346542358e-01 7.7552998065948486e-01 + <_> + + 0 -1 1816 2.3771999403834343e-02 + + 1.9021899998188019e-01 -6.0162097215652466e-01 + <_> + + 0 -1 1817 1.1501000262796879e-02 + + 7.7039999887347221e-03 -6.1730301380157471e-01 + <_> + + 0 -1 1818 3.2616000622510910e-02 + + 1.7159199714660645e-01 -7.0978200435638428e-01 + <_> + + 0 -1 1819 -4.4383000582456589e-02 + + -2.2606229782104492e+00 -7.3276996612548828e-02 + <_> + + 0 -1 1820 -5.8476001024246216e-02 + + 2.4087750911712646e+00 8.3091996610164642e-02 + <_> + + 0 -1 1821 1.9303999841213226e-02 + + -2.7082300186157227e-01 2.7369999885559082e-01 + <_> + + 0 -1 1822 -4.4705998152494431e-02 + + 3.1355598568916321e-01 -6.2492001801729202e-02 + <_> + + 0 -1 1823 -6.0334999114274979e-02 + + -1.4515119791030884e+00 -5.8761000633239746e-02 + <_> + + 0 -1 1824 1.1667000129818916e-02 + + -1.8084999173879623e-02 5.0479698181152344e-01 + <_> + + 0 -1 1825 2.8009999543428421e-02 + + -2.3302899301052094e-01 3.0708700418472290e-01 + <_> + + 0 -1 1826 6.5397001802921295e-02 + + 1.4135900139808655e-01 -5.0010901689529419e-01 + <_> + + 0 -1 1827 9.6239997074007988e-03 + + -2.2054600715637207e-01 
3.9191201329231262e-01 + <_> + + 0 -1 1828 2.5510000996291637e-03 + + -1.1381500214338303e-01 2.0032300055027008e-01 + <_> + + 0 -1 1829 3.1847000122070312e-02 + + 2.5476999580860138e-02 -5.3326398134231567e-01 + <_> + + 0 -1 1830 3.3055000007152557e-02 + + 1.7807699739933014e-01 -6.2793898582458496e-01 + <_> + + 0 -1 1831 4.7600999474525452e-02 + + -1.4747899770736694e-01 1.4204180240631104e+00 + <_> + + 0 -1 1832 -1.9571999087929726e-02 + + -5.2693498134613037e-01 1.5838600695133209e-01 + <_> + + 0 -1 1833 -5.4730001837015152e-02 + + 8.8231599330902100e-01 -1.6627800464630127e-01 + <_> + + 0 -1 1834 -2.2686000913381577e-02 + + -4.8386898636817932e-01 1.5000100433826447e-01 + <_> + + 0 -1 1835 1.0713200271129608e-01 + + -2.1336199343204498e-01 4.2333900928497314e-01 + <_> + + 0 -1 1836 -3.6380000412464142e-02 + + -7.4198000133037567e-02 1.4589400589466095e-01 + <_> + + 0 -1 1837 1.3935999944806099e-02 + + -2.4911600351333618e-01 2.6771199703216553e-01 + <_> + + 0 -1 1838 2.0991999655961990e-02 + + 8.7959999218583107e-03 4.3064999580383301e-01 + <_> + + 0 -1 1839 4.9118999391794205e-02 + + -1.7591999471187592e-01 6.9282901287078857e-01 + <_> + + 0 -1 1840 3.6315999925136566e-02 + + 1.3145299255847931e-01 -3.3597299456596375e-01 + <_> + + 0 -1 1841 4.1228000074625015e-02 + + -4.5692000538110733e-02 -1.3515930175781250e+00 + <_> + + 0 -1 1842 1.5672000125050545e-02 + + 1.7544099688529968e-01 -6.0550000518560410e-02 + <_> + + 0 -1 1843 -1.6286000609397888e-02 + + -1.1308189630508423e+00 -3.9533000439405441e-02 + <_> + + 0 -1 1844 -3.0229999683797359e-03 + + -2.2454300522804260e-01 2.3628099262714386e-01 + <_> + + 0 -1 1845 -1.3786299526691437e-01 + + 4.5376899838447571e-01 -2.1098700165748596e-01 + <_> + + 0 -1 1846 -9.6760001033544540e-03 + + -1.5105099976062775e-01 2.0781700313091278e-01 + <_> + + 0 -1 1847 -2.4839999154210091e-02 + + -6.8350297212600708e-01 -8.0040004104375839e-03 + <_> + + 0 -1 1848 -1.3964399695396423e-01 + + 6.5011298656463623e-01 4.6544000506401062e-02 + <_> + + 0 -1 1849 -8.2153998315334320e-02 + + 4.4887199997901917e-01 -2.3591999709606171e-01 + <_> + + 0 -1 1850 3.8449999410659075e-03 + + -8.8173002004623413e-02 2.7346798777580261e-01 + <_> + + 0 -1 1851 -6.6579999402165413e-03 + + -4.6866598725318909e-01 7.7001996338367462e-02 + <_> + + 0 -1 1852 -1.5898000448942184e-02 + + 2.9268398880958557e-01 -2.1941000595688820e-02 + <_> + + 0 -1 1853 -5.0946000963449478e-02 + + -1.2093789577484131e+00 -4.2109999805688858e-02 + <_> + + 0 -1 1854 1.6837999224662781e-02 + + -4.5595999807119370e-02 5.0180697441101074e-01 + <_> + + 0 -1 1855 1.5918999910354614e-02 + + -2.6904299855232239e-01 2.6516300439834595e-01 + <_> + + 0 -1 1856 3.6309999413788319e-03 + + -1.3046100735664368e-01 3.1807100772857666e-01 + <_> + + 0 -1 1857 -8.6144998669624329e-02 + + 1.9443659782409668e+00 -1.3978299498558044e-01 + <_> + + 0 -1 1858 3.3140998333692551e-02 + + 1.5266799926757812e-01 -3.0866000801324844e-02 + <_> + + 0 -1 1859 -3.9679999463260174e-03 + + -7.1202301979064941e-01 -1.3844000175595284e-02 + <_> + + 0 -1 1860 -2.4008000269532204e-02 + + 9.2007797956466675e-01 4.6723999083042145e-02 + <_> + + 0 -1 1861 8.7320003658533096e-03 + + -2.2567300498485565e-01 3.1931799650192261e-01 + <_> + + 0 -1 1862 -2.7786999940872192e-02 + + -7.2337102890014648e-01 1.7018599808216095e-01 + <_> + + 0 -1 1863 -1.9455300271511078e-01 + + 1.2461860179901123e+00 -1.4736199378967285e-01 + <_> + + 0 -1 1864 -1.0869699716567993e-01 + + -1.4465179443359375e+00 1.2145300209522247e-01 + <_> + + 0 -1 1865 
-1.9494999200105667e-02 + + -7.8153097629547119e-01 -2.3732999339699745e-02 + <_> + + 0 -1 1866 3.0650000553578138e-03 + + -8.5471397638320923e-01 1.6686999797821045e-01 + <_> + + 0 -1 1867 5.9193998575210571e-02 + + -1.4853699505329132e-01 1.1273469924926758e+00 + <_> + + 0 -1 1868 -5.4207999259233475e-02 + + 5.4726999998092651e-01 3.5523999482393265e-02 + <_> + + 0 -1 1869 -3.9324998855590820e-02 + + 3.6642599105834961e-01 -2.0543999969959259e-01 + <_> + + 0 -1 1870 8.2278996706008911e-02 + + -3.5007998347282410e-02 5.3994202613830566e-01 + <_> + + 0 -1 1871 -7.4479999020695686e-03 + + -6.1537498235702515e-01 -3.5319998860359192e-03 + <_> + + 0 -1 1872 7.3770000599324703e-03 + + -6.5591000020503998e-02 4.1961398720741272e-01 + <_> + + 0 -1 1873 7.0779998786747456e-03 + + -3.4129500389099121e-01 1.2536799907684326e-01 + <_> + + 0 -1 1874 -1.5581999905407429e-02 + + -3.0240398645401001e-01 2.1511000394821167e-01 + <_> + + 0 -1 1875 -2.7399999089539051e-03 + + 7.6553001999855042e-02 -4.1060501337051392e-01 + <_> + + 0 -1 1876 -7.0600003004074097e-02 + + -9.7356200218200684e-01 1.1241800338029861e-01 + <_> + + 0 -1 1877 -1.1706000193953514e-02 + + 1.8560700118541718e-01 -2.9755198955535889e-01 + <_> + + 0 -1 1878 7.1499997284263372e-04 + + -5.9650000184774399e-02 2.4824699759483337e-01 + <_> + + 0 -1 1879 -3.6866001784801483e-02 + + 3.2751700282096863e-01 -2.3059600591659546e-01 + <_> + + 0 -1 1880 -3.2526999711990356e-02 + + -2.9320299625396729e-01 1.5427699685096741e-01 + <_> + + 0 -1 1881 -7.4813999235630035e-02 + + -1.2143570184707642e+00 -5.2244000136852264e-02 + <_> + + 0 -1 1882 4.1469998657703400e-02 + + 1.3062499463558197e-01 -2.3274369239807129e+00 + <_> + + 0 -1 1883 -2.8880000114440918e-02 + + -6.6074597835540771e-01 -9.0960003435611725e-03 + <_> + + 0 -1 1884 4.6381998807191849e-02 + + 1.6630199551582336e-01 -6.6949498653411865e-01 + <_> + + 0 -1 1885 2.5424998998641968e-01 + + -5.4641999304294586e-02 -1.2676080465316772e+00 + <_> + + 0 -1 1886 2.4000001139938831e-03 + + 2.0276799798011780e-01 1.4667999930679798e-02 + <_> + + 0 -1 1887 -8.2805998623371124e-02 + + -7.8713601827621460e-01 -2.4468999356031418e-02 + <_> + + 0 -1 1888 -1.1438000015914440e-02 + + 2.8623399138450623e-01 -3.0894000083208084e-02 + <_> + + 0 -1 1889 -1.2913399934768677e-01 + + 1.7292929887771606e+00 -1.4293900132179260e-01 + <_> + + 0 -1 1890 3.8552999496459961e-02 + + 1.9232999533414841e-02 3.7732601165771484e-01 + <_> + + 0 -1 1891 1.0191400349140167e-01 + + -7.4533998966217041e-02 -3.3868899345397949e+00 + <_> + + 0 -1 1892 -1.9068000838160515e-02 + + 3.1814101338386536e-01 1.9261000677943230e-02 + <_> + + 0 -1 1893 -6.0775000602006912e-02 + + 7.6936298608779907e-01 -1.7644000053405762e-01 + <_> + + 0 -1 1894 2.4679999798536301e-02 + + 1.8396499752998352e-01 -3.0868801474571228e-01 + <_> + + 0 -1 1895 2.6759000495076180e-02 + + -2.3454900085926056e-01 3.3056598901748657e-01 + <_> + + 0 -1 1896 1.4969999901950359e-02 + + 1.7213599383831024e-01 -1.8248899281024933e-01 + <_> + + 0 -1 1897 2.6142999529838562e-02 + + -4.6463999897241592e-02 -1.1318379640579224e+00 + <_> + + 0 -1 1898 -3.7512000650167465e-02 + + 8.0404001474380493e-01 6.9660000503063202e-02 + <_> + + 0 -1 1899 -5.3229997865855694e-03 + + -8.1884402036666870e-01 -1.8224999308586121e-02 + <_> + + 0 -1 1900 1.7813000828027725e-02 + + 1.4957800507545471e-01 -1.8667200207710266e-01 + <_> + + 0 -1 1901 -3.4010000526905060e-02 + + -7.2852301597595215e-01 -1.6615999862551689e-02 + <_> + + 0 -1 1902 -1.5953000634908676e-02 + + 5.6944000720977783e-01 
1.3832000084221363e-02 + <_> + + 0 -1 1903 1.9743999466300011e-02 + + 4.0525000542402267e-02 -4.1773399710655212e-01 + <_> + + 0 -1 1904 -1.0374800115823746e-01 + + -1.9825149774551392e+00 1.1960200220346451e-01 + <_> + + 0 -1 1905 -1.9285000860691071e-02 + + 5.0230598449707031e-01 -1.9745899736881256e-01 + <_> + + 0 -1 1906 -1.2780000455677509e-02 + + 4.0195000171661377e-01 -2.6957999914884567e-02 + <_> + + 0 -1 1907 -1.6352999955415726e-02 + + -7.6608800888061523e-01 -2.4209000170230865e-02 + <_> + + 0 -1 1908 -1.2763699889183044e-01 + + 8.6578500270843506e-01 6.4205996692180634e-02 + <_> + + 0 -1 1909 1.9068999215960503e-02 + + -5.5929797887802124e-01 -1.6880000475794077e-03 + <_> + + 0 -1 1910 3.2480999827384949e-02 + + 4.0722001343965530e-02 4.8925098776817322e-01 + <_> + + 0 -1 1911 9.4849998131394386e-03 + + -1.9231900572776794e-01 5.1139700412750244e-01 + <_> + + 0 -1 1912 5.0470000132918358e-03 + + 1.8706800043582916e-01 -1.6113600134849548e-01 + <_> + + 0 -1 1913 4.1267998516559601e-02 + + -4.8817999660968781e-02 -1.1326299905776978e+00 + <_> + + 0 -1 1914 -7.6358996331691742e-02 + + 1.4169390201568604e+00 8.7319999933242798e-02 + <_> + + 0 -1 1915 -7.2834998369216919e-02 + + 1.3189860582351685e+00 -1.4819100499153137e-01 + <_> + + 0 -1 1916 5.9576999396085739e-02 + + 4.8376999795436859e-02 8.5611802339553833e-01 + <_> + + 0 -1 1917 2.0263999700546265e-02 + + -2.1044099330902100e-01 3.3858999609947205e-01 + <_> + + 0 -1 1918 -8.0301001667976379e-02 + + -1.2464400529861450e+00 1.1857099831104279e-01 + <_> + + 0 -1 1919 -1.7835000529885292e-02 + + 2.5782299041748047e-01 -2.4564799666404724e-01 + <_> + + 0 -1 1920 1.1431000195443630e-02 + + 2.2949799895286560e-01 -2.9497599601745605e-01 + <_> + + 0 -1 1921 -2.5541000068187714e-02 + + -8.6252999305725098e-01 -7.0400000549852848e-04 + <_> + + 0 -1 1922 -7.6899997657164931e-04 + + 3.1511399149894714e-01 -1.4349000155925751e-01 + <_> + + 0 -1 1923 -1.4453999698162079e-02 + + 2.5148499011993408e-01 -2.8232899308204651e-01 + <_> + + 0 -1 1924 8.6730001494288445e-03 + + 2.6601400971412659e-01 -2.8190800547599792e-01 + <_> + 197 + -3.2772979736328125e+00 + + <_> + + 0 -1 1925 5.4708998650312424e-02 + + -5.4144299030303955e-01 6.1043000221252441e-01 + <_> + + 0 -1 1926 -1.0838799923658371e-01 + + 7.1739900112152100e-01 -4.1196098923683167e-01 + <_> + + 0 -1 1927 2.2996999323368073e-02 + + -5.8269798755645752e-01 2.9645600914955139e-01 + <_> + + 0 -1 1928 2.7540000155568123e-03 + + -7.4243897199630737e-01 1.4183300733566284e-01 + <_> + + 0 -1 1929 -2.1520000882446766e-03 + + 1.7879900336265564e-01 -6.8548601865768433e-01 + <_> + + 0 -1 1930 -2.2559000179171562e-02 + + -1.0775549411773682e+00 1.2388999760150909e-01 + <_> + + 0 -1 1931 8.3025000989437103e-02 + + 2.4500999599695206e-02 -1.0251879692077637e+00 + <_> + + 0 -1 1932 -6.6740000620484352e-03 + + -4.5283100008964539e-01 2.1230199933052063e-01 + <_> + + 0 -1 1933 7.6485000550746918e-02 + + -2.6972699165344238e-01 4.8580199480056763e-01 + <_> + + 0 -1 1934 5.4910001344978809e-03 + + -4.8871201276779175e-01 3.1616398692131042e-01 + <_> + + 0 -1 1935 -1.0414999909698963e-02 + + 4.1512900590896606e-01 -3.0044800043106079e-01 + <_> + + 0 -1 1936 2.7607999742031097e-02 + + 1.6203799843788147e-01 -9.9868500232696533e-01 + <_> + + 0 -1 1937 -2.3272000253200531e-02 + + -1.1024399995803833e+00 2.1124999970197678e-02 + <_> + + 0 -1 1938 -5.5619999766349792e-02 + + 6.5033102035522461e-01 -2.7938000857830048e-02 + <_> + + 0 -1 1939 -4.0631998330354691e-02 + + 4.2117300629615784e-01 
-2.6763799786567688e-01 + <_> + + 0 -1 1940 -7.3560001328587532e-03 + + 3.5277798771858215e-01 -3.7854000926017761e-01 + <_> + + 0 -1 1941 1.7007000744342804e-02 + + -2.9189500212669373e-01 4.1053798794746399e-01 + <_> + + 0 -1 1942 -3.7034001201391220e-02 + + -1.3216309547424316e+00 1.2966500222682953e-01 + <_> + + 0 -1 1943 -1.9633000716567039e-02 + + -8.7702298164367676e-01 1.0799999581649899e-03 + <_> + + 0 -1 1944 -2.3546999320387840e-02 + + 2.6106101274490356e-01 -2.1481400728225708e-01 + <_> + + 0 -1 1945 -4.3352998793125153e-02 + + -9.9089699983596802e-01 -9.9560003727674484e-03 + <_> + + 0 -1 1946 -2.2183999419212341e-02 + + 6.3454401493072510e-01 -5.6547001004219055e-02 + <_> + + 0 -1 1947 1.6530999913811684e-02 + + 2.4664999917149544e-02 -7.3326802253723145e-01 + <_> + + 0 -1 1948 -3.2744001597166061e-02 + + -5.6297200918197632e-01 1.6640299558639526e-01 + <_> + + 0 -1 1949 7.1415998041629791e-02 + + -3.0000001424923539e-04 -9.3286401033401489e-01 + <_> + + 0 -1 1950 8.0999999772757292e-04 + + -9.5380000770092010e-02 2.5184699892997742e-01 + <_> + + 0 -1 1951 -8.4090000018477440e-03 + + -6.5496802330017090e-01 6.7300997674465179e-02 + <_> + + 0 -1 1952 -1.7254000529646873e-02 + + -4.6492999792098999e-01 1.6070899367332458e-01 + <_> + + 0 -1 1953 -1.8641000613570213e-02 + + -1.0594010353088379e+00 -1.9617000594735146e-02 + <_> + + 0 -1 1954 -9.1979997232556343e-03 + + 5.0716197490692139e-01 -1.5339200198650360e-01 + <_> + + 0 -1 1955 1.8538000062108040e-02 + + -3.0498200654983521e-01 7.3506200313568115e-01 + <_> + + 0 -1 1956 -5.0335001200437546e-02 + + -1.1140480041503906e+00 1.8000100553035736e-01 + <_> + + 0 -1 1957 -2.3529000580310822e-02 + + -8.6907899379730225e-01 -1.2459999881684780e-02 + <_> + + 0 -1 1958 -2.7100000530481339e-02 + + 6.5942901372909546e-01 -3.5323999822139740e-02 + <_> + + 0 -1 1959 6.5879998728632927e-03 + + -2.2953400015830994e-01 4.2425099015235901e-01 + <_> + + 0 -1 1960 2.3360000923275948e-02 + + 1.8356199562549591e-01 -9.8587298393249512e-01 + <_> + + 0 -1 1961 1.2946999631822109e-02 + + -3.3147400617599487e-01 2.1323199570178986e-01 + <_> + + 0 -1 1962 -6.6559999249875546e-03 + + -1.1951400339603424e-01 2.9752799868583679e-01 + <_> + + 0 -1 1963 -2.2570999339222908e-02 + + 3.8499400019645691e-01 -2.4434499442577362e-01 + <_> + + 0 -1 1964 -6.3813999295234680e-02 + + -8.9383500814437866e-01 1.4217500388622284e-01 + <_> + + 0 -1 1965 -4.9945000559091568e-02 + + 5.3864401578903198e-01 -2.0485299825668335e-01 + <_> + + 0 -1 1966 6.8319998681545258e-03 + + -5.6678999215364456e-02 3.9970999956130981e-01 + <_> + + 0 -1 1967 -5.5835999548435211e-02 + + -1.5239470005035400e+00 -5.1183000206947327e-02 + <_> + + 0 -1 1968 3.1957000494003296e-01 + + 7.4574001133441925e-02 1.2447799444198608e+00 + <_> + + 0 -1 1969 8.0955997109413147e-02 + + -1.9665500521659851e-01 5.9889698028564453e-01 + <_> + + 0 -1 1970 -1.4911999925971031e-02 + + -6.4020597934722900e-01 1.5807600319385529e-01 + <_> + + 0 -1 1971 4.6709001064300537e-02 + + 8.5239000618457794e-02 -4.5487201213836670e-01 + <_> + + 0 -1 1972 6.0539999976754189e-03 + + -4.3184000253677368e-01 2.2452600300312042e-01 + <_> + + 0 -1 1973 -3.4375999122858047e-02 + + 4.0202501416206360e-01 -2.3903599381446838e-01 + <_> + + 0 -1 1974 -3.4924000501632690e-02 + + 5.2870100736618042e-01 3.9709001779556274e-02 + <_> + + 0 -1 1975 3.0030000489205122e-03 + + -3.8754299283027649e-01 1.4192600548267365e-01 + <_> + + 0 -1 1976 -1.4132999815046787e-02 + + 8.7528401613235474e-01 8.5507996380329132e-02 + <_> + + 0 -1 1977 
-6.7940000444650650e-03 + + -1.1649219989776611e+00 -3.3943001180887222e-02 + <_> + + 0 -1 1978 -5.2886001765727997e-02 + + 1.0930680036544800e+00 5.1187001168727875e-02 + <_> + + 0 -1 1979 -2.1079999860376120e-03 + + 1.3696199655532837e-01 -3.3849999308586121e-01 + <_> + + 0 -1 1980 1.8353000283241272e-02 + + 1.3661600649356842e-01 -4.0777799487113953e-01 + <_> + + 0 -1 1981 1.2671999633312225e-02 + + -1.4936000108718872e-02 -8.1707501411437988e-01 + <_> + + 0 -1 1982 1.2924999929964542e-02 + + 1.7625099420547485e-01 -3.2491698861122131e-01 + <_> + + 0 -1 1983 -1.7921000719070435e-02 + + -5.2745401859283447e-01 4.4443000108003616e-02 + <_> + + 0 -1 1984 1.9160000374540687e-03 + + -1.0978599637746811e-01 2.2067500650882721e-01 + <_> + + 0 -1 1985 -1.4697999693453312e-02 + + 3.9067798852920532e-01 -2.2224999964237213e-01 + <_> + + 0 -1 1986 -1.4972999691963196e-02 + + -2.5450900197029114e-01 1.7790000140666962e-01 + <_> + + 0 -1 1987 1.4636999927461147e-02 + + -2.5125000625848770e-02 -8.7121301889419556e-01 + <_> + + 0 -1 1988 -1.0974000208079815e-02 + + 7.9082798957824707e-01 2.0121000707149506e-02 + <_> + + 0 -1 1989 -9.1599998995661736e-03 + + -4.7906899452209473e-01 5.2232000976800919e-02 + <_> + + 0 -1 1990 4.6179997734725475e-03 + + -1.7244599759578705e-01 3.4527799487113953e-01 + <_> + + 0 -1 1991 2.3476999253034592e-02 + + 3.7760001141577959e-03 -6.5333700180053711e-01 + <_> + + 0 -1 1992 3.1766999512910843e-02 + + 1.6364000737667084e-02 5.8723700046539307e-01 + <_> + + 0 -1 1993 -1.8419999629259109e-02 + + 1.9993899762630463e-01 -3.2056498527526855e-01 + <_> + + 0 -1 1994 1.9543999806046486e-02 + + 1.8450200557708740e-01 -2.3793600499629974e-01 + <_> + + 0 -1 1995 4.1159498691558838e-01 + + -6.0382001101970673e-02 -1.6072119474411011e+00 + <_> + + 0 -1 1996 -4.1595999151468277e-02 + + -3.2756200432777405e-01 1.5058000385761261e-01 + <_> + + 0 -1 1997 -1.0335999540984631e-02 + + -6.2394398450851440e-01 1.3112000189721584e-02 + <_> + + 0 -1 1998 1.2392999604344368e-02 + + -3.3114999532699585e-02 5.5579900741577148e-01 + <_> + + 0 -1 1999 -8.7270000949501991e-03 + + 1.9883200526237488e-01 -3.7635600566864014e-01 + <_> + + 0 -1 2000 1.6295000910758972e-02 + + 2.0373000204563141e-01 -4.2800799012184143e-01 + <_> + + 0 -1 2001 -1.0483999736607075e-02 + + -5.6847000122070312e-01 4.4199001044034958e-02 + <_> + + 0 -1 2002 -1.2431999668478966e-02 + + 7.4641901254653931e-01 4.3678998947143555e-02 + <_> + + 0 -1 2003 -5.0374999642372131e-02 + + 8.5090100765228271e-01 -1.7773799598217010e-01 + <_> + + 0 -1 2004 4.9548000097274780e-02 + + 1.6784900426864624e-01 -2.9877498745918274e-01 + <_> + + 0 -1 2005 -4.1085001081228256e-02 + + -1.3302919864654541e+00 -4.9182001501321793e-02 + <_> + + 0 -1 2006 1.0069999843835831e-03 + + -6.0538999736309052e-02 1.8483200669288635e-01 + <_> + + 0 -1 2007 -5.0142999738454819e-02 + + 7.6447701454162598e-01 -1.8356999754905701e-01 + <_> + + 0 -1 2008 -8.7879998609423637e-03 + + 2.2655999660491943e-01 -6.3156999647617340e-02 + <_> + + 0 -1 2009 -5.0170999020338058e-02 + + -1.5899070501327515e+00 -6.1255000531673431e-02 + <_> + + 0 -1 2010 1.0216099768877029e-01 + + 1.2071800231933594e-01 -1.4120110273361206e+00 + <_> + + 0 -1 2011 -1.4372999779880047e-02 + + -1.3116970062255859e+00 -5.1936000585556030e-02 + <_> + + 0 -1 2012 1.0281999595463276e-02 + + -2.1639999467879534e-03 4.4247201085090637e-01 + <_> + + 0 -1 2013 -1.1814000084996223e-02 + + 6.5378099679946899e-01 -1.8723699450492859e-01 + <_> + + 0 -1 2014 7.2114996612071991e-02 + + 7.1846999228000641e-02 
8.1496298313140869e-01 + <_> + + 0 -1 2015 -1.9001999869942665e-02 + + -6.7427200078964233e-01 -4.3200000072829425e-04 + <_> + + 0 -1 2016 -4.6990001574158669e-03 + + 3.3311501145362854e-01 5.5794000625610352e-02 + <_> + + 0 -1 2017 -5.8157000690698624e-02 + + 4.5572298765182495e-01 -2.0305100083351135e-01 + <_> + + 0 -1 2018 1.1360000353306532e-03 + + -4.4686999171972275e-02 2.2681899368762970e-01 + <_> + + 0 -1 2019 -4.9414999783039093e-02 + + 2.6694598793983459e-01 -2.6116999983787537e-01 + <_> + + 0 -1 2020 -1.1913800239562988e-01 + + -8.3017998933792114e-01 1.3248500227928162e-01 + <_> + + 0 -1 2021 -1.8303999677300453e-02 + + -6.7499202489852905e-01 1.7092000693082809e-02 + <_> + + 0 -1 2022 -7.9199997708201408e-03 + + -7.2287000715732574e-02 1.4425800740718842e-01 + <_> + + 0 -1 2023 5.1925998181104660e-02 + + 3.0921999365091324e-02 -5.5860602855682373e-01 + <_> + + 0 -1 2024 6.6724002361297607e-02 + + 1.3666400313377380e-01 -2.9411000013351440e-01 + <_> + + 0 -1 2025 -1.3778000138700008e-02 + + -5.9443902969360352e-01 1.5300000086426735e-02 + <_> + + 0 -1 2026 -1.7760999500751495e-02 + + 4.0496501326560974e-01 -3.3559999428689480e-03 + <_> + + 0 -1 2027 -4.2234998196363449e-02 + + -1.0897940397262573e+00 -4.0224999189376831e-02 + <_> + + 0 -1 2028 -1.3524999842047691e-02 + + 2.8921899199485779e-01 -2.5194799900054932e-01 + <_> + + 0 -1 2029 -1.1106000281870365e-02 + + 6.5312802791595459e-01 -1.8053700029850006e-01 + <_> + + 0 -1 2030 -1.2284599989652634e-01 + + -1.9570649862289429e+00 1.4815400540828705e-01 + <_> + + 0 -1 2031 4.7715999186038971e-02 + + -2.2875599563121796e-01 3.4233701229095459e-01 + <_> + + 0 -1 2032 3.1817000359296799e-02 + + 1.5976299345493317e-01 -1.0091969966888428e+00 + <_> + + 0 -1 2033 4.2570000514388084e-03 + + -3.8881298899650574e-01 8.4210000932216644e-02 + <_> + + 0 -1 2034 -6.1372999101877213e-02 + + 1.7152810096740723e+00 5.9324998408555984e-02 + <_> + + 0 -1 2035 -2.7030000928789377e-03 + + -3.8161700963973999e-01 8.5127003490924835e-02 + <_> + + 0 -1 2036 -6.8544000387191772e-02 + + -3.0925889015197754e+00 1.1788000166416168e-01 + <_> + + 0 -1 2037 1.0372500121593475e-01 + + -1.3769300282001495e-01 1.9009410142898560e+00 + <_> + + 0 -1 2038 1.5799000859260559e-02 + + -6.2660001218318939e-02 2.5917699933052063e-01 + <_> + + 0 -1 2039 -9.8040001466870308e-03 + + -5.6291598081588745e-01 4.3923001736402512e-02 + <_> + + 0 -1 2040 -9.0229995548725128e-03 + + 2.5287100672721863e-01 -4.1225999593734741e-02 + <_> + + 0 -1 2041 -6.3754998147487640e-02 + + -2.6178569793701172e+00 -7.4005998671054840e-02 + <_> + + 0 -1 2042 3.8954999297857285e-02 + + 5.9032998979091644e-02 8.5945600271224976e-01 + <_> + + 0 -1 2043 -3.9802998304367065e-02 + + 9.3600499629974365e-01 -1.5639400482177734e-01 + <_> + + 0 -1 2044 5.0301998853683472e-02 + + 1.3725900650024414e-01 -2.5549728870391846e+00 + <_> + + 0 -1 2045 4.6250000596046448e-02 + + -1.3964000158011913e-02 -7.1026200056076050e-01 + <_> + + 0 -1 2046 6.2196001410484314e-02 + + 5.9526000171899796e-02 1.6509100198745728e+00 + <_> + + 0 -1 2047 -6.4776003360748291e-02 + + 7.1368998289108276e-01 -1.7270000278949738e-01 + <_> + + 0 -1 2048 2.7522999793291092e-02 + + 1.4631600677967072e-01 -8.1428997218608856e-02 + <_> + + 0 -1 2049 3.9900001138448715e-04 + + -3.7144500017166138e-01 1.0152699798345566e-01 + <_> + + 0 -1 2050 -4.3299999088048935e-03 + + -2.3756299912929535e-01 2.6798400282859802e-01 + <_> + + 0 -1 2051 4.7297000885009766e-02 + + -2.7682000771164894e-02 -8.4910297393798828e-01 + <_> + + 0 -1 2052 
1.2508999556303024e-02 + + 1.8730199337005615e-01 -5.6001102924346924e-01 + <_> + + 0 -1 2053 4.5899000018835068e-02 + + -1.5601199865341187e-01 9.7073000669479370e-01 + <_> + + 0 -1 2054 1.9853399693965912e-01 + + 1.4895500242710114e-01 -1.1015529632568359e+00 + <_> + + 0 -1 2055 1.6674999147653580e-02 + + -1.6615299880504608e-01 8.2210999727249146e-01 + <_> + + 0 -1 2056 1.9829999655485153e-03 + + -7.1249999105930328e-02 2.8810900449752808e-01 + <_> + + 0 -1 2057 2.2447999566793442e-02 + + -2.0981000736355782e-02 -7.8416502475738525e-01 + <_> + + 0 -1 2058 -1.3913000002503395e-02 + + -1.8165799975395203e-01 2.0491799712181091e-01 + <_> + + 0 -1 2059 -7.7659999951720238e-03 + + -4.5595899224281311e-01 6.3576996326446533e-02 + <_> + + 0 -1 2060 -1.3209000229835510e-02 + + 2.6632300019264221e-01 -1.7795999348163605e-01 + <_> + + 0 -1 2061 4.9052998423576355e-02 + + -1.5476800501346588e-01 1.1069979667663574e+00 + <_> + + 0 -1 2062 2.0263999700546265e-02 + + 6.8915002048015594e-02 6.9867497682571411e-01 + <_> + + 0 -1 2063 -1.6828000545501709e-02 + + 2.7607199549674988e-01 -2.5139200687408447e-01 + <_> + + 0 -1 2064 -1.6939499974250793e-01 + + -3.0767529010772705e+00 1.1617500334978104e-01 + <_> + + 0 -1 2065 -1.1336100101470947e-01 + + -1.4639229774475098e+00 -5.1447000354528427e-02 + <_> + + 0 -1 2066 -7.7685996890068054e-02 + + 8.8430202007293701e-01 4.3306998908519745e-02 + <_> + + 0 -1 2067 -1.5568000264465809e-02 + + 1.3672499358654022e-01 -3.4505501389503479e-01 + <_> + + 0 -1 2068 -6.6018998622894287e-02 + + -1.0300110578536987e+00 1.1601399630308151e-01 + <_> + + 0 -1 2069 8.3699999377131462e-03 + + 7.6429001986980438e-02 -4.4002500176429749e-01 + <_> + + 0 -1 2070 3.5402998328208923e-02 + + 1.1979500204324722e-01 -7.2668302059173584e-01 + <_> + + 0 -1 2071 -3.9051000028848648e-02 + + 6.7375302314758301e-01 -1.8196000158786774e-01 + <_> + + 0 -1 2072 -9.7899995744228363e-03 + + 2.1264599263668060e-01 3.6756001412868500e-02 + <_> + + 0 -1 2073 -2.3047000169754028e-02 + + 4.4742199778556824e-01 -2.0986700057983398e-01 + <_> + + 0 -1 2074 3.1169999856501818e-03 + + 3.7544000893831253e-02 2.7808201313018799e-01 + <_> + + 0 -1 2075 1.3136000372469425e-02 + + -1.9842399656772614e-01 5.4335701465606689e-01 + <_> + + 0 -1 2076 1.4782000333070755e-02 + + 1.3530600070953369e-01 -1.1153600364923477e-01 + <_> + + 0 -1 2077 -6.0139000415802002e-02 + + 8.4039300680160522e-01 -1.6711600124835968e-01 + <_> + + 0 -1 2078 5.1998998969793320e-02 + + 1.7372000217437744e-01 -7.8547602891921997e-01 + <_> + + 0 -1 2079 2.4792000651359558e-02 + + -1.7739200592041016e-01 6.6752600669860840e-01 + <_> + + 0 -1 2080 -1.2014999985694885e-02 + + -1.4263699948787689e-01 1.6070500016212463e-01 + <_> + + 0 -1 2081 -9.8655998706817627e-02 + + 1.0429769754409790e+00 -1.5770199894905090e-01 + <_> + + 0 -1 2082 1.1758299916982651e-01 + + 1.0955700278282166e-01 -4.4920377731323242e+00 + <_> + + 0 -1 2083 -1.8922999501228333e-02 + + -7.8543400764465332e-01 1.2984000146389008e-02 + <_> + + 0 -1 2084 -2.8390999883413315e-02 + + -6.0569900274276733e-01 1.2903499603271484e-01 + <_> + + 0 -1 2085 1.3182999566197395e-02 + + -1.4415999874472618e-02 -7.3210501670837402e-01 + <_> + + 0 -1 2086 -1.1653000116348267e-01 + + -2.0442469120025635e+00 1.4053100347518921e-01 + <_> + + 0 -1 2087 -3.8880000356584787e-03 + + -4.1861599683761597e-01 7.8704997897148132e-02 + <_> + + 0 -1 2088 3.1229000538587570e-02 + + 2.4632999673485756e-02 4.1870400309562683e-01 + <_> + + 0 -1 2089 2.5198999792337418e-02 + + -1.7557799816131592e-01 
6.4710599184036255e-01 + <_> + + 0 -1 2090 -2.8124000877141953e-02 + + -2.2005599737167358e-01 1.4121000468730927e-01 + <_> + + 0 -1 2091 3.6499001085758209e-02 + + -6.8426996469497681e-02 -2.3410849571228027e+00 + <_> + + 0 -1 2092 -7.2292998433113098e-02 + + 1.2898750305175781e+00 8.4875002503395081e-02 + <_> + + 0 -1 2093 -4.1671000421047211e-02 + + -1.1630970239639282e+00 -5.3752999752759933e-02 + <_> + + 0 -1 2094 4.7703001648187637e-02 + + 7.0101000368595123e-02 7.3676502704620361e-01 + <_> + + 0 -1 2095 6.5793000161647797e-02 + + -1.7755299806594849e-01 6.9780498743057251e-01 + <_> + + 0 -1 2096 1.3904999941587448e-02 + + 2.1936799585819244e-01 -2.0390799641609192e-01 + <_> + + 0 -1 2097 -2.7730999514460564e-02 + + 6.1867898702621460e-01 -1.7804099619388580e-01 + <_> + + 0 -1 2098 -1.5879999846220016e-02 + + -4.6484100818634033e-01 1.8828600645065308e-01 + <_> + + 0 -1 2099 7.4128001928329468e-02 + + -1.2858100235462189e-01 3.2792479991912842e+00 + <_> + + 0 -1 2100 -8.9000002481043339e-04 + + -3.0117601156234741e-01 2.3818799853324890e-01 + <_> + + 0 -1 2101 1.7965000122785568e-02 + + -2.2284999489784241e-01 2.9954001307487488e-01 + <_> + + 0 -1 2102 -2.5380000006407499e-03 + + 2.5064399838447571e-01 -1.3665600121021271e-01 + <_> + + 0 -1 2103 -9.0680001303553581e-03 + + 2.9017499089241028e-01 -2.8929701447486877e-01 + <_> + + 0 -1 2104 4.9169998615980148e-02 + + 1.9156399369239807e-01 -6.8328702449798584e-01 + <_> + + 0 -1 2105 -3.0680999159812927e-02 + + -7.5677001476287842e-01 -1.3279999606311321e-02 + <_> + + 0 -1 2106 1.0017400234937668e-01 + + 8.4453999996185303e-02 1.0888710021972656e+00 + <_> + + 0 -1 2107 3.1950001139193773e-03 + + -2.6919400691986084e-01 1.9537900388240814e-01 + <_> + + 0 -1 2108 3.5503000020980835e-02 + + 1.3632300496101379e-01 -5.6917202472686768e-01 + <_> + + 0 -1 2109 4.5900000259280205e-04 + + -4.0443998575210571e-01 1.4074799418449402e-01 + <_> + + 0 -1 2110 2.5258999317884445e-02 + + 1.6243200004100800e-01 -5.5741798877716064e-01 + <_> + + 0 -1 2111 -5.1549999043345451e-03 + + 3.1132599711418152e-01 -2.2756099700927734e-01 + <_> + + 0 -1 2112 1.5869999770075083e-03 + + -2.6867699623107910e-01 1.9565400481224060e-01 + <_> + + 0 -1 2113 -1.6204999759793282e-02 + + 1.5486499667167664e-01 -3.4057798981666565e-01 + <_> + + 0 -1 2114 -2.9624000191688538e-02 + + 1.1466799974441528e+00 9.0557999908924103e-02 + <_> + + 0 -1 2115 -1.5930000226944685e-03 + + -7.1257501840591431e-01 -7.0400000549852848e-04 + <_> + + 0 -1 2116 -5.4019000381231308e-02 + + 4.1537499427795410e-01 2.7246000245213509e-02 + <_> + + 0 -1 2117 -6.6211000084877014e-02 + + -1.3340090513229370e+00 -4.7352999448776245e-02 + <_> + + 0 -1 2118 2.7940999716520309e-02 + + 1.4446300268173218e-01 -5.1518398523330688e-01 + <_> + + 0 -1 2119 2.8957000002264977e-02 + + -4.9966000020503998e-02 -1.1929039955139160e+00 + <_> + + 0 -1 2120 -2.0424999296665192e-02 + + 6.3881301879882812e-01 3.8141001015901566e-02 + <_> + + 0 -1 2121 1.2416999787092209e-02 + + -2.1547000110149384e-01 4.9477699398994446e-01 + <_> + 181 + -3.3196411132812500e+00 + + <_> + + 0 -1 2122 4.3274000287055969e-02 + + -8.0494397878646851e-01 3.9897298812866211e-01 + <_> + + 0 -1 2123 1.8615500628948212e-01 + + -3.1655299663543701e-01 6.8877297639846802e-01 + <_> + + 0 -1 2124 3.1860999763011932e-02 + + -6.4266198873519897e-01 2.5550898909568787e-01 + <_> + + 0 -1 2125 1.4022000133991241e-02 + + -4.5926600694656372e-01 3.1171199679374695e-01 + <_> + + 0 -1 2126 -6.3029997982084751e-03 + + 4.6026900410652161e-01 
-2.7438500523567200e-01 + <_> + + 0 -1 2127 -5.4310001432895660e-03 + + 3.6608600616455078e-01 -2.7205801010131836e-01 + <_> + + 0 -1 2128 1.6822999343276024e-02 + + 2.3476999253034592e-02 -8.8443797826766968e-01 + <_> + + 0 -1 2129 2.6039000600576401e-02 + + 1.7488799989223480e-01 -5.4564702510833740e-01 + <_> + + 0 -1 2130 -2.6720000430941582e-02 + + -9.6396499872207642e-01 2.3524999618530273e-02 + <_> + + 0 -1 2131 -1.7041999846696854e-02 + + -7.0848798751831055e-01 2.1468099951744080e-01 + <_> + + 0 -1 2132 5.9569999575614929e-03 + + 7.3601000010967255e-02 -6.8225598335266113e-01 + <_> + + 0 -1 2133 -2.8679999522864819e-03 + + -7.4935001134872437e-01 2.3803399503231049e-01 + <_> + + 0 -1 2134 -4.3774999678134918e-02 + + 6.8323302268981934e-01 -2.1380299329757690e-01 + <_> + + 0 -1 2135 5.1633000373840332e-02 + + -1.2566499412059784e-01 6.7523801326751709e-01 + <_> + + 0 -1 2136 8.1780003383755684e-03 + + 7.0689998567104340e-02 -8.0665898323059082e-01 + <_> + + 0 -1 2137 -5.2841998636722565e-02 + + 9.5433902740478516e-01 1.6548000276088715e-02 + <_> + + 0 -1 2138 5.2583999931812286e-02 + + -2.8414401412010193e-01 4.7129800915718079e-01 + <_> + + 0 -1 2139 -1.2659000232815742e-02 + + 3.8445401191711426e-01 -6.2288001179695129e-02 + <_> + + 0 -1 2140 1.1694000102579594e-02 + + 5.6000000768108293e-05 -1.0173139572143555e+00 + <_> + + 0 -1 2141 -2.3918999359011650e-02 + + 8.4921300411224365e-01 5.7399999350309372e-03 + <_> + + 0 -1 2142 -6.1673998832702637e-02 + + -9.2571401596069336e-01 -1.7679999582469463e-03 + <_> + + 0 -1 2143 -1.8279999494552612e-03 + + -5.4372298717498779e-01 2.4932399392127991e-01 + <_> + + 0 -1 2144 3.5257998853921890e-02 + + -7.3719997890293598e-03 -9.3963998556137085e-01 + <_> + + 0 -1 2145 -1.8438000231981277e-02 + + 7.2136700153350830e-01 1.0491999797523022e-02 + <_> + + 0 -1 2146 -3.8389001041650772e-02 + + 1.9272600114345551e-01 -3.5832101106643677e-01 + <_> + + 0 -1 2147 9.9720999598503113e-02 + + 1.1354199796915054e-01 -1.6304190158843994e+00 + <_> + + 0 -1 2148 8.4462001919746399e-02 + + -5.3420998156070709e-02 -1.6981120109558105e+00 + <_> + + 0 -1 2149 4.0270000696182251e-02 + + -1.0783199965953827e-01 5.1926600933074951e-01 + <_> + + 0 -1 2150 5.8935999870300293e-02 + + -1.8053700029850006e-01 9.5119798183441162e-01 + <_> + + 0 -1 2151 1.4957000315189362e-01 + + 1.6785299777984619e-01 -1.1591869592666626e+00 + <_> + + 0 -1 2152 6.9399998756125569e-04 + + 2.0491400361061096e-01 -3.3118200302124023e-01 + <_> + + 0 -1 2153 -3.3369001001119614e-02 + + 9.3468099832534790e-01 -2.9639999847859144e-03 + <_> + + 0 -1 2154 9.3759996816515923e-03 + + 3.7000000011175871e-03 -7.7549797296524048e-01 + <_> + + 0 -1 2155 4.3193999677896500e-02 + + -2.2040000185370445e-03 7.4589699506759644e-01 + <_> + + 0 -1 2156 -6.7555002868175507e-02 + + 7.2292101383209229e-01 -1.8404200673103333e-01 + <_> + + 0 -1 2157 -3.1168600916862488e-01 + + 1.0014270544052124e+00 3.4003000706434250e-02 + <_> + + 0 -1 2158 2.9743999242782593e-02 + + -4.6356000006198883e-02 -1.2781809568405151e+00 + <_> + + 0 -1 2159 1.0737000033259392e-02 + + 1.4812000095844269e-02 6.6649997234344482e-01 + <_> + + 0 -1 2160 -2.8841000050306320e-02 + + -9.4222599267959595e-01 -2.0796999335289001e-02 + <_> + + 0 -1 2161 -5.7649998925626278e-03 + + -4.3541899323463440e-01 2.3386000096797943e-01 + <_> + + 0 -1 2162 2.8410999104380608e-02 + + -1.7615799605846405e-01 8.5765302181243896e-01 + <_> + + 0 -1 2163 -2.9007999226450920e-02 + + 5.7978099584579468e-01 2.8565999120473862e-02 + <_> + + 0 -1 2164 
2.4965999647974968e-02 + + -2.2729000076651573e-02 -9.6773099899291992e-01 + <_> + + 0 -1 2165 1.2036000378429890e-02 + + -1.4214700460433960e-01 5.1687997579574585e-01 + <_> + + 0 -1 2166 -4.2514000087976456e-02 + + 9.7273802757263184e-01 -1.8119800090789795e-01 + <_> + + 0 -1 2167 1.0276000015437603e-02 + + -8.3099998533725739e-02 3.1762799620628357e-01 + <_> + + 0 -1 2168 -6.9191999733448029e-02 + + -2.0668580532073975e+00 -6.0173999518156052e-02 + <_> + + 0 -1 2169 -4.6769999898970127e-03 + + 4.4131800532341003e-01 2.3209000006318092e-02 + <_> + + 0 -1 2170 -1.3923999853432178e-02 + + 2.8606700897216797e-01 -2.9152700304985046e-01 + <_> + + 0 -1 2171 -1.5333999879658222e-02 + + -5.7414501905441284e-01 2.3063300549983978e-01 + <_> + + 0 -1 2172 -1.0239000432193279e-02 + + 3.4479200839996338e-01 -2.6080399751663208e-01 + <_> + + 0 -1 2173 -5.0988998264074326e-02 + + 5.6154102087020874e-01 6.1218999326229095e-02 + <_> + + 0 -1 2174 3.0689999461174011e-02 + + -1.4772799611091614e-01 1.6378489732742310e+00 + <_> + + 0 -1 2175 -1.1223999783396721e-02 + + 2.4006199836730957e-01 -4.4864898920059204e-01 + <_> + + 0 -1 2176 -6.2899999320507050e-03 + + 4.3119499087333679e-01 -2.3808999359607697e-01 + <_> + + 0 -1 2177 7.8590996563434601e-02 + + 1.9865000620484352e-02 8.0853801965713501e-01 + <_> + + 0 -1 2178 -1.0178999975323677e-02 + + 1.8193200230598450e-01 -3.2877799868583679e-01 + <_> + + 0 -1 2179 3.1227000057697296e-02 + + 1.4973899722099304e-01 -1.4180339574813843e+00 + <_> + + 0 -1 2180 4.0196999907493591e-02 + + -1.9760499894618988e-01 5.8508199453353882e-01 + <_> + + 0 -1 2181 1.6138000413775444e-02 + + 5.0000002374872565e-04 3.9050000905990601e-01 + <_> + + 0 -1 2182 -4.5519001781940460e-02 + + 1.2646820545196533e+00 -1.5632599592208862e-01 + <_> + + 0 -1 2183 -1.8130000680685043e-02 + + 6.5148502588272095e-01 1.0235999710857868e-02 + <_> + + 0 -1 2184 -1.4001999981701374e-02 + + -1.0344820022583008e+00 -3.2182998955249786e-02 + <_> + + 0 -1 2185 -3.8816001266241074e-02 + + -4.7874298691749573e-01 1.6290700435638428e-01 + <_> + + 0 -1 2186 3.1656000763177872e-02 + + -2.0983399450778961e-01 5.4575902223587036e-01 + <_> + + 0 -1 2187 -1.0839999653398991e-02 + + 5.1898801326751709e-01 -1.5080000273883343e-02 + <_> + + 0 -1 2188 1.2032999657094479e-02 + + -2.1107600629329681e-01 7.5937002897262573e-01 + <_> + + 0 -1 2189 7.0772998034954071e-02 + + 1.8048800528049469e-01 -7.4048501253128052e-01 + <_> + + 0 -1 2190 5.3139799833297729e-01 + + -1.4491699635982513e-01 1.5360039472579956e+00 + <_> + + 0 -1 2191 -1.4774000272154808e-02 + + -2.8153699636459351e-01 2.0407299697399139e-01 + <_> + + 0 -1 2192 -2.2410000674426556e-03 + + -4.4876301288604736e-01 5.3989000618457794e-02 + <_> + + 0 -1 2193 4.9968000501394272e-02 + + 4.1514001786708832e-02 2.9417100548744202e-01 + <_> + + 0 -1 2194 -4.7701999545097351e-02 + + 3.9674299955368042e-01 -2.8301799297332764e-01 + <_> + + 0 -1 2195 -9.1311000287532806e-02 + + 2.1994259357452393e+00 8.7964996695518494e-02 + <_> + + 0 -1 2196 3.8070000708103180e-02 + + -2.8025600314140320e-01 2.5156199932098389e-01 + <_> + + 0 -1 2197 -1.5538999810814857e-02 + + 3.4157499670982361e-01 1.7924999818205833e-02 + <_> + + 0 -1 2198 -1.5445999801158905e-02 + + 2.8680199384689331e-01 -2.5135898590087891e-01 + <_> + + 0 -1 2199 -5.7388000190258026e-02 + + 6.3830000162124634e-01 8.8597998023033142e-02 + <_> + + 0 -1 2200 -5.9440000914037228e-03 + + 7.9016998410224915e-02 -4.0774899721145630e-01 + <_> + + 0 -1 2201 -6.9968998432159424e-02 + + -4.4644200801849365e-01 
1.7219600081443787e-01 + <_> + + 0 -1 2202 -2.5064999237656593e-02 + + -9.8270201683044434e-01 -3.5388000309467316e-02 + <_> + + 0 -1 2203 1.7216000705957413e-02 + + 2.2705900669097900e-01 -8.0550098419189453e-01 + <_> + + 0 -1 2204 -4.4279001653194427e-02 + + 8.3951997756958008e-01 -1.7429600656032562e-01 + <_> + + 0 -1 2205 4.3988998979330063e-02 + + 1.1557199805974960e-01 -1.9666889905929565e+00 + <_> + + 0 -1 2206 1.5907000750303268e-02 + + -3.7576001137495041e-02 -1.0311100482940674e+00 + <_> + + 0 -1 2207 -9.2754997313022614e-02 + + -1.3530019521713257e+00 1.2141299992799759e-01 + <_> + + 0 -1 2208 7.1037001907825470e-02 + + -1.7684300243854523e-01 7.4485200643539429e-01 + <_> + + 0 -1 2209 5.7762000709772110e-02 + + 1.2835599482059479e-01 -4.4444200396537781e-01 + <_> + + 0 -1 2210 -1.6432000324130058e-02 + + 8.0152702331542969e-01 -1.7491699755191803e-01 + <_> + + 0 -1 2211 2.3939000442624092e-02 + + 1.6144999861717224e-01 -1.2364500015974045e-01 + <_> + + 0 -1 2212 1.2636000290513039e-02 + + 1.5411999821662903e-01 -3.3293798565864563e-01 + <_> + + 0 -1 2213 -5.4347999393939972e-02 + + -1.8400700092315674e+00 1.4835999906063080e-01 + <_> + + 0 -1 2214 -1.3261999934911728e-02 + + -8.0838799476623535e-01 -2.7726000174880028e-02 + <_> + + 0 -1 2215 6.1340001411736012e-03 + + -1.3785000145435333e-01 3.2858499884605408e-01 + <_> + + 0 -1 2216 2.8991000726819038e-02 + + -2.5516999885439873e-02 -8.3387202024459839e-01 + <_> + + 0 -1 2217 -2.1986000239849091e-02 + + -7.3739999532699585e-01 1.7887100577354431e-01 + <_> + + 0 -1 2218 5.3269998170435429e-03 + + -4.5449298620223999e-01 6.8791002035140991e-02 + <_> + + 0 -1 2219 8.6047999560832977e-02 + + 2.1008500456809998e-01 -3.7808901071548462e-01 + <_> + + 0 -1 2220 -8.5549997165799141e-03 + + 4.0134999155998230e-01 -2.1074099838733673e-01 + <_> + + 0 -1 2221 6.7790001630783081e-03 + + -2.1648999303579330e-02 4.5421499013900757e-01 + <_> + + 0 -1 2222 -6.3959998078644276e-03 + + -4.9818599224090576e-01 7.5907997786998749e-02 + <_> + + 0 -1 2223 8.9469999074935913e-03 + + 1.7857700586318970e-01 -2.8454899787902832e-01 + <_> + + 0 -1 2224 3.2589999027550220e-03 + + 4.6624999493360519e-02 -5.5206298828125000e-01 + <_> + + 0 -1 2225 4.1476998478174210e-02 + + 1.7550499737262726e-01 -2.0703999698162079e-01 + <_> + + 0 -1 2226 -6.7449999041855335e-03 + + -4.6392598748207092e-01 6.9303996860980988e-02 + <_> + + 0 -1 2227 3.0564999207854271e-02 + + 5.1734998822212219e-02 7.5550502538681030e-01 + <_> + + 0 -1 2228 -7.4780001305043697e-03 + + 1.4893899857997894e-01 -3.1906801462173462e-01 + <_> + + 0 -1 2229 8.9088998734951019e-02 + + 1.3738800585269928e-01 -1.1379710435867310e+00 + <_> + + 0 -1 2230 7.3230001144111156e-03 + + -2.8829199075698853e-01 1.9088600575923920e-01 + <_> + + 0 -1 2231 -1.8205000087618828e-02 + + -3.0178600549697876e-01 1.6795800626277924e-01 + <_> + + 0 -1 2232 -2.5828000158071518e-02 + + -9.8137998580932617e-01 -1.9860999658703804e-02 + <_> + + 0 -1 2233 1.0936199873685837e-01 + + 4.8790000379085541e-02 5.3118300437927246e-01 + <_> + + 0 -1 2234 -1.1424999684095383e-02 + + 2.3705999553203583e-01 -2.7925300598144531e-01 + <_> + + 0 -1 2235 -5.7565998286008835e-02 + + 4.7255399823188782e-01 6.5171003341674805e-02 + <_> + + 0 -1 2236 1.0278300195932388e-01 + + -2.0765100419521332e-01 5.0947701930999756e-01 + <_> + + 0 -1 2237 2.7041999623179436e-02 + + 1.6421200335025787e-01 -1.4508620500564575e+00 + <_> + + 0 -1 2238 -1.3635000213980675e-02 + + -5.6543898582458496e-01 2.3788999766111374e-02 + <_> + + 0 -1 2239 
-3.2158198952674866e-01 + + -3.5602829456329346e+00 1.1801300197839737e-01 + <_> + + 0 -1 2240 2.0458100736141205e-01 + + -3.7016000598669052e-02 -1.0225499868392944e+00 + <_> + + 0 -1 2241 -7.0347003638744354e-02 + + -5.6491899490356445e-01 1.8525199592113495e-01 + <_> + + 0 -1 2242 3.7831000983715057e-02 + + -2.9901999980211258e-02 -8.2921499013900757e-01 + <_> + + 0 -1 2243 -7.0298001170158386e-02 + + -5.3172302246093750e-01 1.4430199563503265e-01 + <_> + + 0 -1 2244 6.3221000134944916e-02 + + -2.2041200101375580e-01 4.7952198982238770e-01 + <_> + + 0 -1 2245 3.6393001675605774e-02 + + 1.4222699403762817e-01 -6.1193901300430298e-01 + <_> + + 0 -1 2246 4.0099998004734516e-03 + + -3.4560799598693848e-01 1.1738699674606323e-01 + <_> + + 0 -1 2247 -4.9106001853942871e-02 + + 9.5984101295471191e-01 6.4934998750686646e-02 + <_> + + 0 -1 2248 -7.1583002805709839e-02 + + 1.7385669946670532e+00 -1.4252899587154388e-01 + <_> + + 0 -1 2249 -3.8008999079465866e-02 + + 1.3872820138931274e+00 6.6188000142574310e-02 + <_> + + 0 -1 2250 -3.1570000573992729e-03 + + 5.3677000105381012e-02 -5.4048001766204834e-01 + <_> + + 0 -1 2251 1.9458999857306480e-02 + + -9.3620002269744873e-02 3.9131000638008118e-01 + <_> + + 0 -1 2252 1.1293999850749969e-02 + + 3.7223998457193375e-02 -5.4251801967620850e-01 + <_> + + 0 -1 2253 -3.3495001494884491e-02 + + 9.5307898521423340e-01 3.7696998566389084e-02 + <_> + + 0 -1 2254 9.2035003006458282e-02 + + -1.3488399982452393e-01 2.2897069454193115e+00 + <_> + + 0 -1 2255 3.7529999390244484e-03 + + 2.2824199497699738e-01 -5.9983700513839722e-01 + <_> + + 0 -1 2256 1.2848000042140484e-02 + + -2.2005200386047363e-01 3.7221899628639221e-01 + <_> + + 0 -1 2257 -1.4316199719905853e-01 + + 1.2855789661407471e+00 4.7237001359462738e-02 + <_> + + 0 -1 2258 -9.6879996359348297e-02 + + -3.9550929069519043e+00 -7.2903998196125031e-02 + <_> + + 0 -1 2259 -8.8459998369216919e-03 + + 3.7674999237060547e-01 -4.6484000980854034e-02 + <_> + + 0 -1 2260 1.5900000929832458e-02 + + -2.4457000195980072e-02 -8.0034798383712769e-01 + <_> + + 0 -1 2261 7.0372000336647034e-02 + + 1.7019000649452209e-01 -6.3068997859954834e-01 + <_> + + 0 -1 2262 -3.7953998893499374e-02 + + -9.3667197227478027e-01 -4.1214000433683395e-02 + <_> + + 0 -1 2263 5.1597899198532104e-01 + + 1.3080599904060364e-01 -1.5802290439605713e+00 + <_> + + 0 -1 2264 -3.2843001186847687e-02 + + -1.1441620588302612e+00 -4.9173999577760696e-02 + <_> + + 0 -1 2265 -3.6357000470161438e-02 + + 4.9606400728225708e-01 -3.4458998590707779e-02 + <_> + + 0 -1 2266 6.8080001510679722e-03 + + -3.0997800827026367e-01 1.7054800689220428e-01 + <_> + + 0 -1 2267 -1.6114000231027603e-02 + + -3.7904599308967590e-01 1.6078999638557434e-01 + <_> + + 0 -1 2268 8.4530003368854523e-03 + + -1.8655499815940857e-01 5.6367701292037964e-01 + <_> + + 0 -1 2269 -1.3752399384975433e-01 + + -5.8989900350570679e-01 1.1749500036239624e-01 + <_> + + 0 -1 2270 1.7688000202178955e-01 + + -1.5424899756908417e-01 9.2911100387573242e-01 + <_> + + 0 -1 2271 7.9309996217489243e-03 + + 3.2190701365470886e-01 -1.6392600536346436e-01 + <_> + + 0 -1 2272 1.0971800237894058e-01 + + -1.5876500308513641e-01 1.0186259746551514e+00 + <_> + + 0 -1 2273 -3.0293000862002373e-02 + + 7.5587302446365356e-01 3.1794998794794083e-02 + <_> + + 0 -1 2274 -2.3118000477552414e-02 + + -8.8451498746871948e-01 -9.5039997249841690e-03 + <_> + + 0 -1 2275 -3.0900000128895044e-03 + + 2.3838299512863159e-01 -1.1606200039386749e-01 + <_> + + 0 -1 2276 -3.3392000943422318e-02 + + -1.8738139867782593e+00 
-6.8502999842166901e-02 + <_> + + 0 -1 2277 1.3190000317990780e-02 + + 1.2919899821281433e-01 -6.7512202262878418e-01 + <_> + + 0 -1 2278 1.4661000110208988e-02 + + -2.4829000234603882e-02 -7.4396800994873047e-01 + <_> + + 0 -1 2279 -1.3248000293970108e-02 + + 4.6820199489593506e-01 -2.4165000766515732e-02 + <_> + + 0 -1 2280 -1.6218999400734901e-02 + + 4.0083798766136169e-01 -2.1255700290203094e-01 + <_> + + 0 -1 2281 -2.9052000492811203e-02 + + -1.5650019645690918e+00 1.4375899732112885e-01 + <_> + + 0 -1 2282 -1.0153199732303619e-01 + + -1.9220689535140991e+00 -6.9559998810291290e-02 + <_> + + 0 -1 2283 3.7753999233245850e-02 + + 1.3396799564361572e-01 -2.2639141082763672e+00 + <_> + + 0 -1 2284 -2.8555598855018616e-01 + + 1.0215270519256592e+00 -1.5232199430465698e-01 + <_> + + 0 -1 2285 1.5360699594020844e-01 + + -9.7409002482891083e-02 4.1662400960922241e-01 + <_> + + 0 -1 2286 -2.1199999901000410e-04 + + 1.1271899938583374e-01 -4.1653999686241150e-01 + <_> + + 0 -1 2287 -2.0597999915480614e-02 + + 6.0540497303009033e-01 6.2467999756336212e-02 + <_> + + 0 -1 2288 3.7353999912738800e-02 + + -1.8919000029563904e-01 4.6464699506759644e-01 + <_> + + 0 -1 2289 5.7275000959634781e-02 + + 1.1565300077199936e-01 -1.3213009834289551e+00 + <_> + + 0 -1 2290 5.1029999740421772e-03 + + -2.8061500191688538e-01 1.9313399493694305e-01 + <_> + + 0 -1 2291 -5.4644998162984848e-02 + + 7.2428500652313232e-01 7.5447998940944672e-02 + <_> + + 0 -1 2292 2.5349000468850136e-02 + + -1.9481800496578217e-01 4.6032801270484924e-01 + <_> + + 0 -1 2293 2.4311000481247902e-02 + + 1.5564100444316864e-01 -4.9913901090621948e-01 + <_> + + 0 -1 2294 3.5962000489234924e-02 + + -5.8573000133037567e-02 -1.5418399572372437e+00 + <_> + + 0 -1 2295 -1.0000699758529663e-01 + + -1.6100039482116699e+00 1.1450500041246414e-01 + <_> + + 0 -1 2296 8.4435999393463135e-02 + + -6.1406999826431274e-02 -1.4673349857330322e+00 + <_> + + 0 -1 2297 1.5947999432682991e-02 + + 1.6287900507450104e-01 -1.1026400327682495e-01 + <_> + + 0 -1 2298 3.3824000507593155e-02 + + -1.7932699620723724e-01 5.7218402624130249e-01 + <_> + + 0 -1 2299 -6.1996001750230789e-02 + + 4.6511812210083008e+00 9.4534002244472504e-02 + <_> + + 0 -1 2300 6.9876998662948608e-02 + + -1.6985900700092316e-01 8.7028998136520386e-01 + <_> + + 0 -1 2301 -2.7916999533772469e-02 + + 9.1042500734329224e-01 5.6827001273632050e-02 + <_> + + 0 -1 2302 -1.2764000333845615e-02 + + 2.2066700458526611e-01 -2.7769100666046143e-01 + <_> + 199 + -3.2573320865631104e+00 + + <_> + + 0 -1 2303 2.1662000566720963e-02 + + -8.9868897199630737e-01 2.9436299204826355e-01 + <_> + + 0 -1 2304 1.0044500231742859e-01 + + -3.7659201025962830e-01 6.0891002416610718e-01 + <_> + + 0 -1 2305 2.6003999635577202e-02 + + -3.8128501176834106e-01 3.9217400550842285e-01 + <_> + + 0 -1 2306 2.8441000729799271e-02 + + -1.8182300031185150e-01 5.8927202224731445e-01 + <_> + + 0 -1 2307 3.8612000644207001e-02 + + -2.2399599850177765e-01 6.3779997825622559e-01 + <_> + + 0 -1 2308 -4.6594999730587006e-02 + + 7.0812201499938965e-01 -1.4666199684143066e-01 + <_> + + 0 -1 2309 -4.2791999876499176e-02 + + 4.7680398821830750e-01 -2.9233199357986450e-01 + <_> + + 0 -1 2310 3.7960000336170197e-03 + + -1.8510299921035767e-01 5.2626699209213257e-01 + <_> + + 0 -1 2311 4.2348999530076981e-02 + + 3.9244998246431351e-02 -8.9197701215744019e-01 + <_> + + 0 -1 2312 1.9598999992012978e-02 + + -2.3358400166034698e-01 4.4146499037742615e-01 + <_> + + 0 -1 2313 8.7400001939386129e-04 + + -4.6063598990440369e-01 
1.7689600586891174e-01 + <_> + + 0 -1 2314 -4.3629999272525311e-03 + + 3.3493199944496155e-01 -2.9893401265144348e-01 + <_> + + 0 -1 2315 1.6973000019788742e-02 + + -1.6408699750900269e-01 1.5993679761886597e+00 + <_> + + 0 -1 2316 3.6063998937606812e-02 + + 2.2601699829101562e-01 -5.3186100721359253e-01 + <_> + + 0 -1 2317 -7.0864997804164886e-02 + + 1.5220500528812408e-01 -4.1914600133895874e-01 + <_> + + 0 -1 2318 -6.3075996935367584e-02 + + -1.4874019622802734e+00 1.2953700125217438e-01 + <_> + + 0 -1 2319 2.9670000076293945e-02 + + -1.9145900011062622e-01 9.8184901475906372e-01 + <_> + + 0 -1 2320 3.7873998284339905e-02 + + 1.3459500670433044e-01 -5.6316298246383667e-01 + <_> + + 0 -1 2321 -3.3289000391960144e-02 + + -1.0828030109405518e+00 -1.1504000052809715e-02 + <_> + + 0 -1 2322 -3.1608998775482178e-02 + + -5.9224498271942139e-01 1.3394799828529358e-01 + <_> + + 0 -1 2323 1.0740000288933516e-03 + + -4.9185800552368164e-01 9.4446003437042236e-02 + <_> + + 0 -1 2324 -7.1556001901626587e-02 + + 5.9710198640823364e-01 -3.9553001523017883e-02 + <_> + + 0 -1 2325 -8.1170000135898590e-02 + + -1.1817820072174072e+00 -2.8254000470042229e-02 + <_> + + 0 -1 2326 4.4860001653432846e-03 + + -6.1028099060058594e-01 2.2619099915027618e-01 + <_> + + 0 -1 2327 -4.2176000773906708e-02 + + -1.1435619592666626e+00 -2.9001999646425247e-02 + <_> + + 0 -1 2328 -6.5640002489089966e-02 + + -1.6470279693603516e+00 1.2810300290584564e-01 + <_> + + 0 -1 2329 1.8188999965786934e-02 + + -3.1149399280548096e-01 2.5739601254463196e-01 + <_> + + 0 -1 2330 -5.1520001143217087e-02 + + -6.9206899404525757e-01 1.5270799398422241e-01 + <_> + + 0 -1 2331 -4.7150999307632446e-02 + + -7.1868300437927246e-01 2.6879999786615372e-03 + <_> + + 0 -1 2332 1.7488999292254448e-02 + + 2.2371199727058411e-01 -5.5381798744201660e-01 + <_> + + 0 -1 2333 -2.5264000520110130e-02 + + 1.0319819450378418e+00 -1.7496499419212341e-01 + <_> + + 0 -1 2334 -4.0745001286268234e-02 + + 4.4961598515510559e-01 3.9349000900983810e-02 + <_> + + 0 -1 2335 -3.7666998803615570e-02 + + -8.5475701093673706e-01 -1.2463999912142754e-02 + <_> + + 0 -1 2336 -1.3411000370979309e-02 + + 5.7845598459243774e-01 -1.7467999830842018e-02 + <_> + + 0 -1 2337 -7.8999997640494257e-05 + + -3.7749201059341431e-01 1.3961799442768097e-01 + <_> + + 0 -1 2338 -1.1415000073611736e-02 + + -2.6186600327491760e-01 2.3712499439716339e-01 + <_> + + 0 -1 2339 3.7200000137090683e-02 + + -2.8626000508666039e-02 -1.2945239543914795e+00 + <_> + + 0 -1 2340 3.4050000831484795e-03 + + 2.0531399548053741e-01 -1.8747499585151672e-01 + <_> + + 0 -1 2341 -2.2483000531792641e-02 + + 6.7027199268341064e-01 -1.9594000279903412e-01 + <_> + + 0 -1 2342 2.3274999111890793e-02 + + 1.7405399680137634e-01 -3.2746300101280212e-01 + <_> + + 0 -1 2343 -1.3917000032961369e-02 + + -8.3954298496246338e-01 -6.3760001212358475e-03 + <_> + + 0 -1 2344 7.5429999269545078e-03 + + -3.4194998443126678e-02 5.8998197317123413e-01 + <_> + + 0 -1 2345 -1.1539000086486340e-02 + + 4.2142799496650696e-01 -2.3510499298572540e-01 + <_> + + 0 -1 2346 5.2501998841762543e-02 + + 6.9303996860980988e-02 7.3226499557495117e-01 + <_> + + 0 -1 2347 5.2715998142957687e-02 + + -1.5688100457191467e-01 1.0907289981842041e+00 + <_> + + 0 -1 2348 -1.1726000346243382e-02 + + -7.0934301614761353e-01 1.6828800737857819e-01 + <_> + + 0 -1 2349 9.5945999026298523e-02 + + -1.6192899644374847e-01 1.0072519779205322e+00 + <_> + + 0 -1 2350 -1.5871999785304070e-02 + + 3.9008399844169617e-01 -5.3777001798152924e-02 + <_> + + 0 -1 2351 
3.4818001091480255e-02 + + 1.7179999500513077e-02 -9.3941801786422729e-01 + <_> + + 0 -1 2352 3.4791998565196991e-02 + + 5.0462998449802399e-02 5.4465699195861816e-01 + <_> + + 0 -1 2353 1.6284000128507614e-02 + + -2.6981300115585327e-01 4.0365299582481384e-01 + <_> + + 0 -1 2354 -4.4319000095129013e-02 + + 8.4399998188018799e-01 3.2882999628782272e-02 + <_> + + 0 -1 2355 -5.5689997971057892e-03 + + 1.5309399366378784e-01 -3.4959799051284790e-01 + <_> + + 0 -1 2356 -6.5842002630233765e-02 + + -9.2711198329925537e-01 1.6800999641418457e-01 + <_> + + 0 -1 2357 -7.3337003588676453e-02 + + 5.1614499092102051e-01 -2.0236000418663025e-01 + <_> + + 0 -1 2358 1.6450000926852226e-02 + + 1.3950599730014801e-01 -4.9301299452781677e-01 + <_> + + 0 -1 2359 -9.2630004510283470e-03 + + -9.0101999044418335e-01 -1.6116000711917877e-02 + <_> + + 0 -1 2360 5.9139998629689217e-03 + + 1.9858199357986450e-01 -1.6731299459934235e-01 + <_> + + 0 -1 2361 -8.4699998842552304e-04 + + 9.4005003571510315e-02 -4.1570898890495300e-01 + <_> + + 0 -1 2362 2.0532900094985962e-01 + + -6.0022000223398209e-02 7.0993602275848389e-01 + <_> + + 0 -1 2363 -1.6883000731468201e-02 + + 2.4392199516296387e-01 -3.0551800131797791e-01 + <_> + + 0 -1 2364 -1.9111000001430511e-02 + + 6.1229902505874634e-01 2.4252999573945999e-02 + <_> + + 0 -1 2365 -2.5962999090552330e-02 + + 9.0764999389648438e-01 -1.6722099483013153e-01 + <_> + + 0 -1 2366 -2.1762000396847725e-02 + + -3.1384700536727905e-01 2.0134599506855011e-01 + <_> + + 0 -1 2367 -2.4119999259710312e-02 + + -6.6588401794433594e-01 7.4559999629855156e-03 + <_> + + 0 -1 2368 4.7129999846220016e-02 + + 5.9533998370170593e-02 8.7804502248764038e-01 + <_> + + 0 -1 2369 -4.5984998345375061e-02 + + 8.0067998170852661e-01 -1.7252300679683685e-01 + <_> + + 0 -1 2370 2.6507999747991562e-02 + + 1.8774099647998810e-01 -6.0850602388381958e-01 + <_> + + 0 -1 2371 -4.8615001142024994e-02 + + 5.8644098043441772e-01 -1.9427700340747833e-01 + <_> + + 0 -1 2372 -1.8562000244855881e-02 + + -2.5587901473045349e-01 1.6326199471950531e-01 + <_> + + 0 -1 2373 1.2678000144660473e-02 + + -1.4228000305593014e-02 -7.6738101243972778e-01 + <_> + + 0 -1 2374 -1.1919999960809946e-03 + + 2.0495000481605530e-01 -1.1404299736022949e-01 + <_> + + 0 -1 2375 -4.9088999629020691e-02 + + -1.0740849971771240e+00 -3.8940999656915665e-02 + <_> + + 0 -1 2376 -1.7436999827623367e-02 + + -5.7973802089691162e-01 1.8584500253200531e-01 + <_> + + 0 -1 2377 -1.4770000241696835e-02 + + -6.6150301694869995e-01 5.3119999356567860e-03 + <_> + + 0 -1 2378 -2.2905200719833374e-01 + + -4.8305100202560425e-01 1.2326399981975555e-01 + <_> + + 0 -1 2379 -1.2707099318504333e-01 + + 5.7452601194381714e-01 -1.9420400261878967e-01 + <_> + + 0 -1 2380 1.0339000262320042e-02 + + -5.4641999304294586e-02 2.4501800537109375e-01 + <_> + + 0 -1 2381 6.9010001607239246e-03 + + 1.2180600315332413e-01 -3.8797399401664734e-01 + <_> + + 0 -1 2382 2.9025399684906006e-01 + + 1.0966199636459351e-01 -30. 
+ <_> + + 0 -1 2383 -2.3804999887943268e-01 + + -1.7352679967880249e+00 -6.3809998333454132e-02 + <_> + + 0 -1 2384 6.2481001019477844e-02 + + 1.3523000478744507e-01 -7.0301097631454468e-01 + <_> + + 0 -1 2385 4.7109997831285000e-03 + + -4.6984100341796875e-01 6.0341998934745789e-02 + <_> + + 0 -1 2386 -2.7815999463200569e-02 + + 6.9807600975036621e-01 1.3719999697059393e-03 + <_> + + 0 -1 2387 -1.7020000144839287e-02 + + 1.6870440244674683e+00 -1.4314800500869751e-01 + <_> + + 0 -1 2388 -4.9754999577999115e-02 + + 7.9497700929641724e-01 7.7199999941512942e-04 + <_> + + 0 -1 2389 -7.4732996523380280e-02 + + -1.0132360458374023e+00 -1.9388999789953232e-02 + <_> + + 0 -1 2390 3.2009001821279526e-02 + + 1.4412100613117218e-01 -4.2139101028442383e-01 + <_> + + 0 -1 2391 -9.4463996589183807e-02 + + 5.0682598352432251e-01 -2.0478899776935577e-01 + <_> + + 0 -1 2392 -1.5426999889314175e-02 + + -1.5811300277709961e-01 1.7806899547576904e-01 + <_> + + 0 -1 2393 -4.0540001355111599e-03 + + -5.4366701841354370e-01 3.1235000118613243e-02 + <_> + + 0 -1 2394 3.0080000869929790e-03 + + -1.7376799881458282e-01 3.0441701412200928e-01 + <_> + + 0 -1 2395 -1.0091999545693398e-02 + + 2.5103801488876343e-01 -2.6224100589752197e-01 + <_> + + 0 -1 2396 -3.8818001747131348e-02 + + 9.3226701021194458e-01 7.2659999132156372e-02 + <_> + + 0 -1 2397 3.4651998430490494e-02 + + -3.3934999257326126e-02 -8.5707902908325195e-01 + <_> + + 0 -1 2398 -4.6729999594390392e-03 + + 3.4969300031661987e-01 -4.8517998307943344e-02 + <_> + + 0 -1 2399 6.8499997723847628e-04 + + 6.6573001444339752e-02 -4.4973799586296082e-01 + <_> + + 0 -1 2400 3.5317000001668930e-02 + + 1.4275799691677094e-01 -4.6726399660110474e-01 + <_> + + 0 -1 2401 -2.3569999262690544e-02 + + -1.0286079645156860e+00 -4.5288000255823135e-02 + <_> + + 0 -1 2402 -1.9109999993816018e-03 + + -1.9652199745178223e-01 2.8661000728607178e-01 + <_> + + 0 -1 2403 -1.6659000888466835e-02 + + -7.7532202005386353e-01 -8.3280000835657120e-03 + <_> + + 0 -1 2404 6.6062200069427490e-01 + + 1.3232499361038208e-01 -3.5266680717468262e+00 + <_> + + 0 -1 2405 1.0970599949359894e-01 + + -1.5547199547290802e-01 1.4674140214920044e+00 + <_> + + 0 -1 2406 1.3500999659299850e-02 + + 1.5233400464057922e-01 -1.3020930290222168e+00 + <_> + + 0 -1 2407 -2.2871999070048332e-02 + + -7.1325999498367310e-01 -8.7040001526474953e-03 + <_> + + 0 -1 2408 -8.1821002066135406e-02 + + 1.1127580404281616e+00 8.3219997584819794e-02 + <_> + + 0 -1 2409 -5.2728001028299332e-02 + + 9.3165099620819092e-01 -1.7103999853134155e-01 + <_> + + 0 -1 2410 -2.5242000818252563e-02 + + -1.9733799993991852e-01 2.5359401106834412e-01 + <_> + + 0 -1 2411 -4.3818999081850052e-02 + + 4.1815200448036194e-01 -2.4585500359535217e-01 + <_> + + 0 -1 2412 -1.8188999965786934e-02 + + -5.1743197441101074e-01 2.0174199342727661e-01 + <_> + + 0 -1 2413 2.3466000333428383e-02 + + -4.3071001768112183e-02 -1.0636579990386963e+00 + <_> + + 0 -1 2414 3.4216001629829407e-02 + + 5.3780999034643173e-02 4.9707201123237610e-01 + <_> + + 0 -1 2415 2.5692999362945557e-02 + + -2.3800100386142731e-01 4.1651499271392822e-01 + <_> + + 0 -1 2416 -2.6565000414848328e-02 + + -8.8574802875518799e-01 1.3365900516510010e-01 + <_> + + 0 -1 2417 6.0942001640796661e-02 + + -2.0669700205326080e-01 5.8309000730514526e-01 + <_> + + 0 -1 2418 1.4474500715732574e-01 + + 1.3282300531864166e-01 -3.1449348926544189e+00 + <_> + + 0 -1 2419 5.3410999476909637e-02 + + -1.7325200140476227e-01 6.9190698862075806e-01 + <_> + + 0 -1 2420 1.1408000253140926e-02 + + 
5.4822001606225967e-02 3.0240398645401001e-01 + <_> + + 0 -1 2421 -2.3179999552667141e-03 + + 1.5820899605751038e-01 -3.1973201036453247e-01 + <_> + + 0 -1 2422 -2.9695000499486923e-02 + + 7.1274799108505249e-01 5.8136001229286194e-02 + <_> + + 0 -1 2423 2.7249999344348907e-02 + + -1.5754100680351257e-01 9.2143797874450684e-01 + <_> + + 0 -1 2424 -3.6200000904500484e-03 + + -3.4548398852348328e-01 2.0220999419689178e-01 + <_> + + 0 -1 2425 -1.2578999623656273e-02 + + -5.5650299787521362e-01 2.0388999953866005e-02 + <_> + + 0 -1 2426 -8.8849000632762909e-02 + + -3.6100010871887207e+00 1.3164199888706207e-01 + <_> + + 0 -1 2427 -1.9256999716162682e-02 + + 5.1908999681472778e-01 -1.9284300506114960e-01 + <_> + + 0 -1 2428 -1.6666999086737633e-02 + + -8.7499998509883881e-02 1.5812499821186066e-01 + <_> + + 0 -1 2429 1.2931999750435352e-02 + + 2.7405999600887299e-02 -5.5123901367187500e-01 + <_> + + 0 -1 2430 -1.3431999832391739e-02 + + 2.3457799851894379e-01 -4.3235000222921371e-02 + <_> + + 0 -1 2431 1.8810000270605087e-02 + + -3.9680998772382736e-02 -9.4373297691345215e-01 + <_> + + 0 -1 2432 -6.4349998719990253e-03 + + 4.5703700184822083e-01 -4.0520001202821732e-03 + <_> + + 0 -1 2433 -2.4249000474810600e-02 + + -7.6248002052307129e-01 -1.9857000559568405e-02 + <_> + + 0 -1 2434 -2.9667999595403671e-02 + + -3.7412509918212891e+00 1.1250600218772888e-01 + <_> + + 0 -1 2435 5.1150000654160976e-03 + + -6.3781797885894775e-01 1.1223999783396721e-02 + <_> + + 0 -1 2436 -5.7819997891783714e-03 + + 1.9374400377273560e-01 -8.2042001187801361e-02 + <_> + + 0 -1 2437 1.6606999561190605e-02 + + -1.6192099452018738e-01 1.1334990262985229e+00 + <_> + + 0 -1 2438 3.8228001445531845e-02 + + 2.1105000749230385e-02 7.6264202594757080e-01 + <_> + + 0 -1 2439 -5.7094000279903412e-02 + + -1.6974929571151733e+00 -5.9762001037597656e-02 + <_> + + 0 -1 2440 -5.3883001208305359e-02 + + 1.1850190162658691e+00 9.0966999530792236e-02 + <_> + + 0 -1 2441 -2.6110000908374786e-03 + + -4.0941199660301208e-01 8.3820998668670654e-02 + <_> + + 0 -1 2442 2.9714399576187134e-01 + + 1.5529899299144745e-01 -1.0995409488677979e+00 + <_> + + 0 -1 2443 -8.9063003659248352e-02 + + 4.8947200179100037e-01 -2.0041200518608093e-01 + <_> + + 0 -1 2444 -5.6193001568317413e-02 + + -2.4581399559974670e-01 1.4365500211715698e-01 + <_> + + 0 -1 2445 3.7004999816417694e-02 + + -4.8168998211622238e-02 -1.2310709953308105e+00 + <_> + + 0 -1 2446 -8.4840003401041031e-03 + + 4.3372601270675659e-01 1.3779999688267708e-02 + <_> + + 0 -1 2447 -2.4379999376833439e-03 + + 1.8949699401855469e-01 -3.2294198870658875e-01 + <_> + + 0 -1 2448 -7.1639999747276306e-02 + + -4.3979001045227051e-01 2.2730199992656708e-01 + <_> + + 0 -1 2449 5.2260002121329308e-03 + + -2.0548400282859802e-01 5.0933301448822021e-01 + <_> + + 0 -1 2450 -6.1360001564025879e-03 + + 3.1157198548316956e-01 7.0680998265743256e-02 + <_> + + 0 -1 2451 1.5595000237226486e-02 + + -3.0934798717498779e-01 1.5627700090408325e-01 + <_> + + 0 -1 2452 2.5995999574661255e-02 + + 1.3821600377559662e-01 -1.7616599798202515e-01 + <_> + + 0 -1 2453 -1.2085000053048134e-02 + + -5.1070201396942139e-01 5.8440998196601868e-02 + <_> + + 0 -1 2454 -6.7836001515388489e-02 + + 4.7757101058959961e-01 -7.1446001529693604e-02 + <_> + + 0 -1 2455 -1.4715000055730343e-02 + + 4.5238900184631348e-01 -1.9861400127410889e-01 + <_> + + 0 -1 2456 2.5118999183177948e-02 + + 1.2954899668693542e-01 -8.6266398429870605e-01 + <_> + + 0 -1 2457 1.8826000392436981e-02 + + -4.1570000350475311e-02 -1.1354700326919556e+00 + <_> 
+ + 0 -1 2458 -2.1263999864459038e-02 + + -3.4738001227378845e-01 1.5779499709606171e-01 + <_> + + 0 -1 2459 9.4609996303915977e-03 + + 4.8639997839927673e-03 -6.1654800176620483e-01 + <_> + + 0 -1 2460 2.2957700490951538e-01 + + 8.1372998654842377e-02 6.9841402769088745e-01 + <_> + + 0 -1 2461 -3.8061998784542084e-02 + + 1.1616369485855103e+00 -1.4976699650287628e-01 + <_> + + 0 -1 2462 -1.3484999537467957e-02 + + -3.2036399841308594e-01 1.7365099489688873e-01 + <_> + + 0 -1 2463 3.6238998174667358e-02 + + -1.8158499896526337e-01 6.1956697702407837e-01 + <_> + + 0 -1 2464 6.7210001870989799e-03 + + 7.9600000753998756e-04 4.2441400885581970e-01 + <_> + + 0 -1 2465 9.6525996923446655e-02 + + -1.4696800708770752e-01 1.2525680065155029e+00 + <_> + + 0 -1 2466 -3.5656999796628952e-02 + + -3.9781698584556580e-01 1.4191399514675140e-01 + <_> + + 0 -1 2467 1.0772000066936016e-02 + + -1.8194000422954559e-01 5.9762197732925415e-01 + <_> + + 0 -1 2468 7.9279996454715729e-02 + + 1.4642499387264252e-01 -7.8836899995803833e-01 + <_> + + 0 -1 2469 3.2841000705957413e-02 + + -6.2408000230789185e-02 -1.4227490425109863e+00 + <_> + + 0 -1 2470 -2.7781000360846519e-02 + + 3.4033098816871643e-01 3.0670000240206718e-02 + <_> + + 0 -1 2471 -4.0339999832212925e-03 + + 3.1084701418876648e-01 -2.2595700621604919e-01 + <_> + + 0 -1 2472 7.4260002002120018e-03 + + -3.8936998695135117e-02 3.1702101230621338e-01 + <_> + + 0 -1 2473 1.1213999986648560e-01 + + -1.7578299343585968e-01 6.5056598186492920e-01 + <_> + + 0 -1 2474 -1.1878100037574768e-01 + + -1.0092990398406982e+00 1.1069700121879578e-01 + <_> + + 0 -1 2475 -4.1584998369216919e-02 + + -5.3806400299072266e-01 1.9905000925064087e-02 + <_> + + 0 -1 2476 -2.7966000139713287e-02 + + 4.8143199086189270e-01 3.3590998500585556e-02 + <_> + + 0 -1 2477 -1.2506400048732758e-01 + + 2.6352199912071228e-01 -2.5737899541854858e-01 + <_> + + 0 -1 2478 2.3666900396347046e-01 + + 3.6508001387119293e-02 9.0655601024627686e-01 + <_> + + 0 -1 2479 -2.9475999996066093e-02 + + -6.0048800706863403e-01 9.5880003646016121e-03 + <_> + + 0 -1 2480 3.7792999297380447e-02 + + 1.5506200492382050e-01 -9.5733499526977539e-01 + <_> + + 0 -1 2481 7.2044000029563904e-02 + + -1.4525899291038513e-01 1.3676730394363403e+00 + <_> + + 0 -1 2482 9.7759999334812164e-03 + + 1.2915999628603458e-02 2.1640899777412415e-01 + <_> + + 0 -1 2483 5.2154000848531723e-02 + + -1.6359999775886536e-02 -8.8356298208236694e-01 + <_> + + 0 -1 2484 -4.3790999799966812e-02 + + 3.5829600691795349e-01 6.5131001174449921e-02 + <_> + + 0 -1 2485 -3.8378998637199402e-02 + + 1.1961040496826172e+00 -1.4971500635147095e-01 + <_> + + 0 -1 2486 -9.8838999867439270e-02 + + -6.1834001541137695e-01 1.2786200642585754e-01 + <_> + + 0 -1 2487 -1.2190700322389603e-01 + + -1.8276120424270630e+00 -6.4862996339797974e-02 + <_> + + 0 -1 2488 -1.1981700360774994e-01 + + -30. 
1.1323300004005432e-01 + <_> + + 0 -1 2489 3.0910000205039978e-02 + + -2.3934000730514526e-01 3.6332899332046509e-01 + <_> + + 0 -1 2490 1.0800999589264393e-02 + + -3.5140000283718109e-02 2.7707898616790771e-01 + <_> + + 0 -1 2491 5.6844998151063919e-02 + + -1.5524299442768097e-01 1.0802700519561768e+00 + <_> + + 0 -1 2492 1.0280000278726220e-03 + + -6.1202999204397202e-02 2.0508000254631042e-01 + <_> + + 0 -1 2493 -2.8273999691009521e-02 + + -6.4778000116348267e-01 2.3917000740766525e-02 + <_> + + 0 -1 2494 -1.6013599932193756e-01 + + 1.0892050266265869e+00 5.8389000594615936e-02 + <_> + + 0 -1 2495 4.9629998393356800e-03 + + -2.5806298851966858e-01 2.0834599435329437e-01 + <_> + + 0 -1 2496 4.6937000006437302e-02 + + 1.3886299729347229e-01 -1.5662620067596436e+00 + <_> + + 0 -1 2497 2.4286000058054924e-02 + + -2.0728300511837006e-01 5.2430999279022217e-01 + <_> + + 0 -1 2498 7.0202000439167023e-02 + + 1.4796899259090424e-01 -1.3095090389251709e+00 + <_> + + 0 -1 2499 9.8120002076029778e-03 + + 2.7906000614166260e-02 -5.0864601135253906e-01 + <_> + + 0 -1 2500 -5.6200999766588211e-02 + + 1.2618130445480347e+00 6.3801996409893036e-02 + <_> + + 0 -1 2501 1.0982800275087357e-01 + + -1.2850099802017212e-01 3.0776169300079346e+00 + <_> + 211 + -3.3703000545501709e+00 + + <_> + + 0 -1 2502 2.0910000428557396e-02 + + -6.8559402227401733e-01 3.8984298706054688e-01 + <_> + + 0 -1 2503 3.5032000392675400e-02 + + -4.7724398970603943e-01 4.5027199387550354e-01 + <_> + + 0 -1 2504 3.9799001067876816e-02 + + -4.7011101245880127e-01 4.2702499032020569e-01 + <_> + + 0 -1 2505 -4.8409998416900635e-03 + + 2.5614300370216370e-01 -6.6556298732757568e-01 + <_> + + 0 -1 2506 2.3439999204128981e-03 + + -4.8083499073982239e-01 2.8013798594474792e-01 + <_> + + 0 -1 2507 2.5312999263405800e-02 + + -2.3948200047016144e-01 4.4191798567771912e-01 + <_> + + 0 -1 2508 -3.2193001359701157e-02 + + 7.6086699962615967e-01 -2.5059100985527039e-01 + <_> + + 0 -1 2509 7.5409002602100372e-02 + + -3.4974598884582520e-01 3.4380298852920532e-01 + <_> + + 0 -1 2510 -1.8469000235199928e-02 + + -7.9085600376129150e-01 3.4788001328706741e-02 + <_> + + 0 -1 2511 -1.2802000157535076e-02 + + 4.7107800841331482e-01 -6.0006000101566315e-02 + <_> + + 0 -1 2512 -2.6598000898957253e-02 + + 6.7116099596023560e-01 -2.4257500469684601e-01 + <_> + + 0 -1 2513 2.1988999098539352e-02 + + 2.4717499315738678e-01 -4.8301699757575989e-01 + <_> + + 0 -1 2514 1.4654099941253662e-01 + + -2.1504099667072296e-01 7.2055900096893311e-01 + <_> + + 0 -1 2515 3.5310001112520695e-03 + + 2.7930998802185059e-01 -3.4339898824691772e-01 + <_> + + 0 -1 2516 9.4010001048445702e-03 + + 5.5861998349428177e-02 -8.2143598794937134e-01 + <_> + + 0 -1 2517 -8.6390003561973572e-03 + + -9.9620598554611206e-01 1.8874999880790710e-01 + <_> + + 0 -1 2518 -3.9193000644445419e-02 + + -1.1945559978485107e+00 -2.9198000207543373e-02 + <_> + + 0 -1 2519 2.4855000898241997e-02 + + 1.4987599849700928e-01 -5.4137802124023438e-01 + <_> + + 0 -1 2520 -3.4995000809431076e-02 + + -1.4210180044174194e+00 -4.2314000427722931e-02 + <_> + + 0 -1 2521 -1.8378999084234238e-02 + + -2.8242599964141846e-01 1.5581800043582916e-01 + <_> + + 0 -1 2522 -1.3592000119388103e-02 + + 4.7317099571228027e-01 -2.1937200427055359e-01 + <_> + + 0 -1 2523 6.2629999592900276e-03 + + -5.9714000672101974e-02 6.0625898838043213e-01 + <_> + + 0 -1 2524 -1.8478000536561012e-02 + + -8.5647201538085938e-01 -1.3783999718725681e-02 + <_> + + 0 -1 2525 1.4236000366508961e-02 + + 1.6654799878597260e-01 
-2.7713999152183533e-01 + <_> + + 0 -1 2526 -3.2547000795602798e-02 + + -1.1728240251541138e+00 -4.0185000747442245e-02 + <_> + + 0 -1 2527 -2.6410000864416361e-03 + + 2.6514300704002380e-01 -5.6343000382184982e-02 + <_> + + 0 -1 2528 -8.7799999164417386e-04 + + 3.6556001752614975e-02 -5.5075198411941528e-01 + <_> + + 0 -1 2529 4.7371998429298401e-02 + + -4.2614001780748367e-02 4.8194900155067444e-01 + <_> + + 0 -1 2530 -7.0790001191198826e-03 + + 2.8698998689651489e-01 -3.2923001050949097e-01 + <_> + + 0 -1 2531 -4.3145999312400818e-02 + + -1.4065419435501099e+00 1.2836399674415588e-01 + <_> + + 0 -1 2532 2.0592000335454941e-02 + + -2.1435299515724182e-01 5.3981798887252808e-01 + <_> + + 0 -1 2533 -2.2367000579833984e-02 + + 3.3718299865722656e-01 4.5212000608444214e-02 + <_> + + 0 -1 2534 5.0039999186992645e-02 + + -2.5121700763702393e-01 4.1750499606132507e-01 + <_> + + 0 -1 2535 6.1794999986886978e-02 + + 4.0084999054670334e-02 6.8779802322387695e-01 + <_> + + 0 -1 2536 -4.1861999779939651e-02 + + 5.3027397394180298e-01 -2.2901999950408936e-01 + <_> + + 0 -1 2537 -3.1959998887032270e-03 + + 2.5161498785018921e-01 -2.1514600515365601e-01 + <_> + + 0 -1 2538 2.4255000054836273e-02 + + 7.2320001199841499e-03 -7.2519099712371826e-01 + <_> + + 0 -1 2539 -1.7303999513387680e-02 + + -4.9958199262619019e-01 1.8394500017166138e-01 + <_> + + 0 -1 2540 -4.1470001451671124e-03 + + 8.5211999714374542e-02 -4.6364700794219971e-01 + <_> + + 0 -1 2541 -1.4369999989867210e-02 + + -5.2258902788162231e-01 2.3892599344253540e-01 + <_> + + 0 -1 2542 -9.0399999171495438e-03 + + -6.3250398635864258e-01 3.2551001757383347e-02 + <_> + + 0 -1 2543 -1.2373100221157074e-01 + + 1.2856210470199585e+00 7.6545000076293945e-02 + <_> + + 0 -1 2544 -8.2221999764442444e-02 + + 8.3208197355270386e-01 -1.8590599298477173e-01 + <_> + + 0 -1 2545 6.5659001469612122e-02 + + 1.1298800259828568e-01 -30. 
+ <_> + + 0 -1 2546 -3.1582999974489212e-02 + + -1.3485900163650513e+00 -4.7097001224756241e-02 + <_> + + 0 -1 2547 -7.9636000096797943e-02 + + -1.3533639907836914e+00 1.5668800473213196e-01 + <_> + + 0 -1 2548 -1.8880000337958336e-02 + + 4.0300300717353821e-01 -2.5148901343345642e-01 + <_> + + 0 -1 2549 -5.0149997696280479e-03 + + -2.6287099719047546e-01 1.8582500517368317e-01 + <_> + + 0 -1 2550 -1.2218000367283821e-02 + + 5.8692401647567749e-01 -1.9427700340747833e-01 + <_> + + 0 -1 2551 1.2710000155493617e-03 + + -1.6688999533653259e-01 2.3006899654865265e-01 + <_> + + 0 -1 2552 2.9743999242782593e-02 + + 1.2520000338554382e-02 -6.6723597049713135e-01 + <_> + + 0 -1 2553 2.8175000101327896e-02 + + -1.7060000449419022e-02 6.4579397439956665e-01 + <_> + + 0 -1 2554 3.0345000326633453e-02 + + -2.4178700149059296e-01 3.4878900647163391e-01 + <_> + + 0 -1 2555 -1.7325999215245247e-02 + + -5.3599399328231812e-01 2.0995999872684479e-01 + <_> + + 0 -1 2556 -8.4178000688552856e-02 + + 7.5093299150466919e-01 -1.7593200504779816e-01 + <_> + + 0 -1 2557 7.4950000271201134e-03 + + -1.6188099980354309e-01 3.0657500028610229e-01 + <_> + + 0 -1 2558 5.6494999676942825e-02 + + -1.7318800091743469e-01 1.0016150474548340e+00 + <_> + + 0 -1 2559 -5.2939997985959053e-03 + + 2.3417599499225616e-01 -6.5347000956535339e-02 + <_> + + 0 -1 2560 -1.4945000410079956e-02 + + 2.5018900632858276e-01 -3.0591198801994324e-01 + <_> + + 0 -1 2561 5.4919000715017319e-02 + + 1.3121999800205231e-01 -9.3765097856521606e-01 + <_> + + 0 -1 2562 -1.9721999764442444e-02 + + -8.3978497982025146e-01 -2.3473000153899193e-02 + <_> + + 0 -1 2563 -6.7158997058868408e-02 + + 2.3586840629577637e+00 8.2970999181270599e-02 + <_> + + 0 -1 2564 -1.4325999654829502e-02 + + 1.8814499676227570e-01 -3.1221601366996765e-01 + <_> + + 0 -1 2565 2.9841000214219093e-02 + + 1.4825099706649780e-01 -8.4681701660156250e-01 + <_> + + 0 -1 2566 5.1883000880479813e-02 + + -4.3731000274419785e-02 -1.3366169929504395e+00 + <_> + + 0 -1 2567 4.1127000004053116e-02 + + 1.7660099267959595e-01 -6.0904097557067871e-01 + <_> + + 0 -1 2568 -1.2865099310874939e-01 + + -9.8701000213623047e-01 -3.7785001099109650e-02 + <_> + + 0 -1 2569 2.4170000106096268e-03 + + -1.6119599342346191e-01 3.2675701379776001e-01 + <_> + + 0 -1 2570 7.7030002139508724e-03 + + -2.3841500282287598e-01 2.9319399595260620e-01 + <_> + + 0 -1 2571 4.5520000159740448e-02 + + 1.4424599707126617e-01 -1.5010160207748413e+00 + <_> + + 0 -1 2572 -7.8700996935367584e-02 + + -1.0394560098648071e+00 -4.5375999063253403e-02 + <_> + + 0 -1 2573 7.8619997948408127e-03 + + 1.9633600115776062e-01 -1.4472399652004242e-01 + <_> + + 0 -1 2574 -1.3458999805152416e-02 + + -9.0634697675704956e-01 -3.8049001246690750e-02 + <_> + + 0 -1 2575 2.8827000409364700e-02 + + -2.9473999515175819e-02 6.0058397054672241e-01 + <_> + + 0 -1 2576 -2.7365999296307564e-02 + + -9.9804002046585083e-01 -3.8653001189231873e-02 + <_> + + 0 -1 2577 -7.2917997837066650e-02 + + 7.3361498117446899e-01 5.7440001517534256e-02 + <_> + + 0 -1 2578 -1.3988999649882317e-02 + + 2.7892601490020752e-01 -2.6516300439834595e-01 + <_> + + 0 -1 2579 4.3242998421192169e-02 + + 4.7760000452399254e-03 3.5925900936126709e-01 + <_> + + 0 -1 2580 2.9533000662922859e-02 + + -2.0083999633789062e-01 5.1202899217605591e-01 + <_> + + 0 -1 2581 -3.1897000968456268e-02 + + 6.4721697568893433e-01 -1.3760000001639128e-03 + <_> + + 0 -1 2582 3.7868998944759369e-02 + + -1.8363800644874573e-01 6.1343097686767578e-01 + <_> + + 0 -1 2583 -2.2417999804019928e-02 + + 
-2.9187899827957153e-01 1.8194800615310669e-01 + <_> + + 0 -1 2584 5.8958999812602997e-02 + + -6.6451996564865112e-02 -1.9290030002593994e+00 + <_> + + 0 -1 2585 3.1222999095916748e-02 + + -1.2732000090181828e-02 6.1560797691345215e-01 + <_> + + 0 -1 2586 3.7484999746084213e-02 + + -2.0856900513172150e-01 4.4363999366760254e-01 + <_> + + 0 -1 2587 -2.0966000854969025e-02 + + -3.5712799429893494e-01 2.4252200126647949e-01 + <_> + + 0 -1 2588 -2.5477999821305275e-02 + + 1.0846560001373291e+00 -1.5054400265216827e-01 + <_> + + 0 -1 2589 -7.2570000775158405e-03 + + 2.1302600204944611e-01 -1.8308199942111969e-01 + <_> + + 0 -1 2590 -5.0983000546693802e-02 + + 5.1736801862716675e-01 -1.8833099305629730e-01 + <_> + + 0 -1 2591 -2.0640000700950623e-02 + + -4.4030201435089111e-01 2.2745999693870544e-01 + <_> + + 0 -1 2592 1.0672999545931816e-02 + + 3.5059999674558640e-02 -5.1665002107620239e-01 + <_> + + 0 -1 2593 3.1895998865365982e-02 + + 1.3228000141680241e-02 3.4915199875831604e-01 + <_> + + 0 -1 2594 -2.3824999108910561e-02 + + 3.4118801355361938e-01 -2.1510200202465057e-01 + <_> + + 0 -1 2595 -6.0680001042783260e-03 + + 3.2937398552894592e-01 -2.8523799777030945e-01 + <_> + + 0 -1 2596 2.3881999775767326e-02 + + -2.5333800911903381e-01 2.6296100020408630e-01 + <_> + + 0 -1 2597 2.7966000139713287e-02 + + 1.4049099385738373e-01 -4.9887099862098694e-01 + <_> + + 0 -1 2598 1.4603000134229660e-02 + + -1.5395999886095524e-02 -7.6958000659942627e-01 + <_> + + 0 -1 2599 1.0872399806976318e-01 + + 1.9069600105285645e-01 -3.2393100857734680e-01 + <_> + + 0 -1 2600 -1.4038000255823135e-02 + + 3.4924700856208801e-01 -2.2358700633049011e-01 + <_> + + 0 -1 2601 4.0440000593662262e-03 + + -3.8329001516103745e-02 5.1177299022674561e-01 + <_> + + 0 -1 2602 -4.9769999459385872e-03 + + -4.2888298630714417e-01 4.9173999577760696e-02 + <_> + + 0 -1 2603 -8.5183002054691315e-02 + + 6.6624599695205688e-01 7.8079998493194580e-03 + <_> + + 0 -1 2604 2.1559998858720064e-03 + + -4.9135199189186096e-01 6.9555997848510742e-02 + <_> + + 0 -1 2605 3.6384499073028564e-01 + + 1.2997099757194519e-01 -1.8949509859085083e+00 + <_> + + 0 -1 2606 2.2082500159740448e-01 + + -5.7211998850107193e-02 -1.4281120300292969e+00 + <_> + + 0 -1 2607 -1.6140000894665718e-02 + + -5.7589399814605713e-01 1.8062500655651093e-01 + <_> + + 0 -1 2608 -4.8330001533031464e-02 + + 9.7308498620986938e-01 -1.6513000428676605e-01 + <_> + + 0 -1 2609 1.7529999837279320e-02 + + 1.7932699620723724e-01 -2.7948901057243347e-01 + <_> + + 0 -1 2610 -3.4309998154640198e-02 + + -8.1072497367858887e-01 -1.6596000641584396e-02 + <_> + + 0 -1 2611 -4.5830002054572105e-03 + + 2.7908998727798462e-01 -7.4519999325275421e-03 + <_> + + 0 -1 2612 1.2896400690078735e-01 + + -1.3508500158786774e-01 2.5411539077758789e+00 + <_> + + 0 -1 2613 3.0361000448465347e-02 + + -6.8419001996517181e-02 2.8734099864959717e-01 + <_> + + 0 -1 2614 4.4086001813411713e-02 + + -1.8135899305343628e-01 6.5413200855255127e-01 + <_> + + 0 -1 2615 3.0159999150782824e-03 + + -1.5690499544143677e-01 2.6963800191879272e-01 + <_> + + 0 -1 2616 -2.6336999610066414e-02 + + 2.9175600409507751e-01 -2.5274100899696350e-01 + <_> + + 0 -1 2617 -2.7866000309586525e-02 + + 4.4387501478195190e-01 5.5038001388311386e-02 + <_> + + 0 -1 2618 1.1725000105798244e-02 + + -1.9346499443054199e-01 4.6656700968742371e-01 + <_> + + 0 -1 2619 1.5689999563619494e-03 + + -8.2360003143548965e-03 2.5700899958610535e-01 + <_> + + 0 -1 2620 -3.5550000611692667e-03 + + -4.2430898547172546e-01 7.1174003183841705e-02 + <_> + + 0 
-1 2621 -3.1695000827312469e-02 + + -8.5393500328063965e-01 1.6916200518608093e-01 + <_> + + 0 -1 2622 -3.2097000628709793e-02 + + 8.3784902095794678e-01 -1.7597299814224243e-01 + <_> + + 0 -1 2623 1.5544199943542480e-01 + + 9.9550001323223114e-02 2.3873300552368164e+00 + <_> + + 0 -1 2624 8.8045999407768250e-02 + + -1.8725299835205078e-01 6.2384301424026489e-01 + <_> + + 0 -1 2625 -1.6720000421628356e-03 + + 2.5008699297904968e-01 -6.5118998289108276e-02 + <_> + + 0 -1 2626 9.3409996479749680e-03 + + -3.5378900170326233e-01 1.0715000331401825e-01 + <_> + + 0 -1 2627 3.7138000130653381e-02 + + 1.6387000679969788e-01 -9.1718399524688721e-01 + <_> + + 0 -1 2628 8.0183997750282288e-02 + + -1.4812999963760376e-01 1.4895190000534058e+00 + <_> + + 0 -1 2629 -7.9100002767518163e-04 + + -2.1326899528503418e-01 1.9676400721073151e-01 + <_> + + 0 -1 2630 -5.0400001928210258e-03 + + -7.1318697929382324e-01 1.8240000354126096e-03 + <_> + + 0 -1 2631 1.1962399631738663e-01 + + 3.3098999410867691e-02 1.0441709756851196e+00 + <_> + + 0 -1 2632 -4.5280000194907188e-03 + + -2.7308499813079834e-01 2.7229800820350647e-01 + <_> + + 0 -1 2633 -2.9639000073075294e-02 + + 3.6225798726081848e-01 5.6795001029968262e-02 + <_> + + 0 -1 2634 2.6650000363588333e-02 + + -4.8041000962257385e-02 -9.6723502874374390e-01 + <_> + + 0 -1 2635 4.4422000646591187e-02 + + 1.3052900135517120e-01 -3.5077300667762756e-01 + <_> + + 0 -1 2636 -2.4359999224543571e-02 + + -1.0766899585723877e+00 -5.1222998648881912e-02 + <_> + + 0 -1 2637 1.9734999164938927e-02 + + 2.6238000020384789e-02 2.8070500493049622e-01 + <_> + + 0 -1 2638 5.4930001497268677e-03 + + -2.6111298799514771e-01 2.1011400222778320e-01 + <_> + + 0 -1 2639 -2.3200300335884094e-01 + + -1.7748440504074097e+00 1.1482600122690201e-01 + <_> + + 0 -1 2640 -2.5614000856876373e-02 + + 2.9900801181793213e-01 -2.2502499818801880e-01 + <_> + + 0 -1 2641 -6.4949998632073402e-03 + + 1.9563800096511841e-01 -9.9762998521327972e-02 + <_> + + 0 -1 2642 3.9840000681579113e-03 + + -4.3021500110626221e-01 8.1261001527309418e-02 + <_> + + 0 -1 2643 -3.5813000053167343e-02 + + -5.0987398624420166e-01 1.6345900297164917e-01 + <_> + + 0 -1 2644 -1.4169000089168549e-02 + + 7.7978098392486572e-01 -1.7476299405097961e-01 + <_> + + 0 -1 2645 -1.2642100453376770e-01 + + -6.3047897815704346e-01 1.2728300690650940e-01 + <_> + + 0 -1 2646 6.8677999079227448e-02 + + -4.6447999775409698e-02 -1.1128979921340942e+00 + <_> + + 0 -1 2647 8.5864998400211334e-02 + + 1.1835400015115738e-01 -4.8235158920288086e+00 + <_> + + 0 -1 2648 1.5511999838054180e-02 + + -1.7467999830842018e-02 -6.3693398237228394e-01 + <_> + + 0 -1 2649 8.1091001629829407e-02 + + 8.6133003234863281e-02 2.4559431076049805e+00 + <_> + + 0 -1 2650 1.8495000898838043e-02 + + 4.0229000151157379e-02 -5.0858199596405029e-01 + <_> + + 0 -1 2651 -8.6320996284484863e-02 + + -1.9006760120391846e+00 1.1019100248813629e-01 + <_> + + 0 -1 2652 7.2355002164840698e-02 + + -6.2111999839544296e-02 -1.4165179729461670e+00 + <_> + + 0 -1 2653 -7.8179001808166504e-02 + + 8.8849300146102905e-01 4.2369998991489410e-02 + <_> + + 0 -1 2654 9.6681997179985046e-02 + + -2.2094200551509857e-01 3.3575099706649780e-01 + <_> + + 0 -1 2655 -3.9875999093055725e-02 + + 5.7804799079895020e-01 4.5347999781370163e-02 + <_> + + 0 -1 2656 -9.5349997282028198e-03 + + -5.4175698757171631e-01 3.2399999909102917e-03 + <_> + + 0 -1 2657 4.0600000647827983e-04 + + -8.1549003720283508e-02 3.5837900638580322e-01 + <_> + + 0 -1 2658 1.2107999995350838e-02 + + -2.0280399918556213e-01 
4.3768000602722168e-01 + <_> + + 0 -1 2659 -2.0873999223113060e-02 + + 4.1469898819923401e-01 -4.5568000525236130e-02 + <_> + + 0 -1 2660 5.7888001203536987e-02 + + -2.9009999707341194e-02 -9.1822302341461182e-01 + <_> + + 0 -1 2661 1.3200000103097409e-04 + + -1.1772400140762329e-01 2.0000000298023224e-01 + <_> + + 0 -1 2662 -1.7137000337243080e-02 + + 3.3004799485206604e-01 -2.3055200278759003e-01 + <_> + + 0 -1 2663 3.0655000358819962e-02 + + -2.1545000374317169e-02 2.6878198981285095e-01 + <_> + + 0 -1 2664 -7.8699999721720815e-04 + + -4.4100698828697205e-01 4.9157999455928802e-02 + <_> + + 0 -1 2665 8.8036999106407166e-02 + + 1.1782000213861465e-01 -2.8293309211730957e+00 + <_> + + 0 -1 2666 -3.9028998464345932e-02 + + 9.1777199506759644e-01 -1.5827399492263794e-01 + <_> + + 0 -1 2667 8.0105997622013092e-02 + + 1.1289200186729431e-01 -1.9937280416488647e+00 + <_> + + 0 -1 2668 3.9538998156785965e-02 + + -1.4357399940490723e-01 1.3085240125656128e+00 + <_> + + 0 -1 2669 2.0684000104665756e-02 + + 2.0048099756240845e-01 -4.4186998158693314e-02 + <_> + + 0 -1 2670 -6.7037999629974365e-02 + + 3.2618600130081177e-01 -2.0550400018692017e-01 + <_> + + 0 -1 2671 4.6815000474452972e-02 + + 1.5825299918651581e-01 -9.5535099506378174e-01 + <_> + + 0 -1 2672 7.8443996608257294e-02 + + -7.4651002883911133e-02 -2.1161499023437500e+00 + <_> + + 0 -1 2673 6.6380001604557037e-02 + + 1.1641900241374969e-01 -1.6113519668579102e+00 + <_> + + 0 -1 2674 3.0053999274969101e-02 + + -1.6562600433826447e-01 7.0025402307510376e-01 + <_> + + 0 -1 2675 1.7119999974966049e-02 + + 2.2627699375152588e-01 -4.0114998817443848e-01 + <_> + + 0 -1 2676 2.0073000341653824e-02 + + -1.9389699399471283e-01 4.4420298933982849e-01 + <_> + + 0 -1 2677 3.3101998269557953e-02 + + 1.1637499928474426e-01 -1.5771679878234863e+00 + <_> + + 0 -1 2678 -1.4882000163197517e-02 + + -8.9680302143096924e-01 -4.2010001838207245e-02 + <_> + + 0 -1 2679 -1.0281000286340714e-02 + + 3.5602998733520508e-01 -1.3124000281095505e-02 + <_> + + 0 -1 2680 -2.8695000335574150e-02 + + -4.6039599180221558e-01 2.6801999658346176e-02 + <_> + + 0 -1 2681 -4.7189998440444469e-03 + + 2.3788799345493317e-01 -6.5518997609615326e-02 + <_> + + 0 -1 2682 3.2201600074768066e-01 + + -2.8489999473094940e-02 -8.4234601259231567e-01 + <_> + + 0 -1 2683 -1.7045000568032265e-02 + + -5.0938802957534790e-01 1.6057600080966949e-01 + <_> + + 0 -1 2684 -7.3469998314976692e-03 + + -5.4154998064041138e-01 4.7320001758635044e-03 + <_> + + 0 -1 2685 -3.0001999810338020e-02 + + -8.8785797357559204e-01 1.3621799647808075e-01 + <_> + + 0 -1 2686 -1.1292999610304832e-02 + + 8.0615198612213135e-01 -1.6159500181674957e-01 + <_> + + 0 -1 2687 4.7749998047947884e-03 + + 1.2968000024557114e-02 5.5079901218414307e-01 + <_> + + 0 -1 2688 5.0710001960396767e-03 + + -4.5728001743555069e-02 -1.0766259431838989e+00 + <_> + + 0 -1 2689 1.9344100356101990e-01 + + 7.1262001991271973e-02 1.1694519519805908e+00 + <_> + + 0 -1 2690 5.3750001825392246e-03 + + -1.9736200571060181e-01 3.8206899166107178e-01 + <_> + + 0 -1 2691 -6.8276003003120422e-02 + + -5.4372339248657227e+00 1.1151900142431259e-01 + <_> + + 0 -1 2692 -3.4933000802993774e-02 + + 4.4793400168418884e-01 -1.8657900393009186e-01 + <_> + + 0 -1 2693 5.1219998858869076e-03 + + -1.4871999621391296e-02 1.8413899838924408e-01 + <_> + + 0 -1 2694 9.5311999320983887e-02 + + -1.5117099881172180e-01 9.4991499185562134e-01 + <_> + + 0 -1 2695 -6.2849000096321106e-02 + + 4.6473601460456848e-01 3.8405001163482666e-02 + <_> + + 0 -1 2696 
-1.7040699720382690e-01 + + -1.6499999761581421e+00 -6.3236996531486511e-02 + <_> + + 0 -1 2697 1.0583999566733837e-02 + + -3.8348998874425888e-02 4.1913801431655884e-01 + <_> + + 0 -1 2698 -4.1579000651836395e-02 + + 3.4461900591850281e-01 -2.1187700331211090e-01 + <_> + + 0 -1 2699 1.2718600034713745e-01 + + 1.2398199737071991e-01 -2.1254889965057373e+00 + <_> + + 0 -1 2700 8.2557000219821930e-02 + + -6.2024001032114029e-02 -1.4875819683074951e+00 + <_> + + 0 -1 2701 8.5293002426624298e-02 + + 1.7087999731302261e-02 3.2076600193977356e-01 + <_> + + 0 -1 2702 5.5544000118970871e-02 + + -2.7414000034332275e-01 1.8976399302482605e-01 + <_> + + 0 -1 2703 4.5650000683963299e-03 + + -1.7920200526714325e-01 2.7967301011085510e-01 + <_> + + 0 -1 2704 1.2997999787330627e-02 + + -3.2297500967979431e-01 2.6941800117492676e-01 + <_> + + 0 -1 2705 5.7891998440027237e-02 + + 1.2644399702548981e-01 -6.0713499784469604e-01 + <_> + + 0 -1 2706 -2.2824000567197800e-02 + + -4.9682098627090454e-01 2.2376999258995056e-02 + <_> + + 0 -1 2707 4.8312000930309296e-02 + + 4.3607000261545181e-02 4.8537799715995789e-01 + <_> + + 0 -1 2708 2.5714000687003136e-02 + + -4.2950998991727829e-02 -9.3023502826690674e-01 + <_> + + 0 -1 2709 6.9269998930394650e-03 + + -2.9680000152438879e-03 3.4296301007270813e-01 + <_> + + 0 -1 2710 -3.4446999430656433e-02 + + -1.5299769639968872e+00 -6.1014998704195023e-02 + <_> + + 0 -1 2711 2.9387999325990677e-02 + + 3.7595998495817184e-02 6.4172399044036865e-01 + <_> + + 0 -1 2712 -2.4319998919963837e-03 + + 9.9088996648788452e-02 -3.9688101410865784e-01 + <_> + 200 + -2.9928278923034668e+00 + + <_> + + 0 -1 2713 -9.5944002270698547e-02 + + 6.2419098615646362e-01 -4.5875200629234314e-01 + <_> + + 0 -1 2714 1.6834000125527382e-02 + + -9.3072801828384399e-01 2.1563600003719330e-01 + <_> + + 0 -1 2715 2.6049999520182610e-02 + + -4.0532299876213074e-01 4.2256599664688110e-01 + <_> + + 0 -1 2716 3.6500001442618668e-04 + + 9.5288001000881195e-02 -6.3298100233078003e-01 + <_> + + 0 -1 2717 -6.6940002143383026e-03 + + 3.7243801355361938e-01 -3.0332401394844055e-01 + <_> + + 0 -1 2718 1.8874000757932663e-02 + + -2.3357200622558594e-01 4.0330699086189270e-01 + <_> + + 0 -1 2719 -1.6300000424962491e-04 + + 4.2886998504400253e-02 -7.7796798944473267e-01 + <_> + + 0 -1 2720 -7.6259002089500427e-02 + + -4.9628499150276184e-01 1.6335399448871613e-01 + <_> + + 0 -1 2721 5.0149001181125641e-02 + + 3.2747000455856323e-02 -8.0047899484634399e-01 + <_> + + 0 -1 2722 -2.9239999130368233e-03 + + -5.0002801418304443e-01 2.5480601191520691e-01 + <_> + + 0 -1 2723 1.6243999823927879e-02 + + 3.8913000375032425e-02 -7.0724898576736450e-01 + <_> + + 0 -1 2724 3.7811998277902603e-02 + + -6.6267997026443481e-02 7.3868799209594727e-01 + <_> + + 0 -1 2725 -1.2319999746978283e-02 + + 4.8696398735046387e-01 -2.4485599994659424e-01 + <_> + + 0 -1 2726 5.8003999292850494e-02 + + 1.3459099829196930e-01 -1.3232100009918213e-01 + <_> + + 0 -1 2727 4.8630000092089176e-03 + + -4.4172900915145874e-01 1.4005599915981293e-01 + <_> + + 0 -1 2728 4.5690998435020447e-02 + + 3.1217999756336212e-02 8.9818298816680908e-01 + <_> + + 0 -1 2729 2.1321000531315804e-02 + + 1.2008000165224075e-02 -8.6066198348999023e-01 + <_> + + 0 -1 2730 1.5679100155830383e-01 + + 1.4055999927222729e-02 8.5332900285720825e-01 + <_> + + 0 -1 2731 -1.0328999720513821e-02 + + 2.9022800922393799e-01 -2.9478800296783447e-01 + <_> + + 0 -1 2732 2.4290001019835472e-03 + + -4.0439900755882263e-01 1.9400200247764587e-01 + <_> + + 0 -1 2733 -2.3338999599218369e-02 + 
+ 3.2945200800895691e-01 -2.5712698698043823e-01 + <_> + + 0 -1 2734 -6.8970001302659512e-03 + + -5.3352999687194824e-01 2.1635200083255768e-01 + <_> + + 0 -1 2735 -3.4403000026941299e-02 + + -1.4425489902496338e+00 -4.4682998210191727e-02 + <_> + + 0 -1 2736 -2.1235000342130661e-02 + + -7.9017502069473267e-01 1.9084100425243378e-01 + <_> + + 0 -1 2737 2.0620001014322042e-03 + + -2.6931199431419373e-01 3.1488001346588135e-01 + <_> + + 0 -1 2738 -4.2190002277493477e-03 + + -5.4464399814605713e-01 1.6574600338935852e-01 + <_> + + 0 -1 2739 -1.4334999956190586e-02 + + 2.2105000913143158e-02 -6.2342500686645508e-01 + <_> + + 0 -1 2740 -8.2120001316070557e-03 + + -4.9884998798370361e-01 1.9237099587917328e-01 + <_> + + 0 -1 2741 -9.3350000679492950e-03 + + -7.9131197929382324e-01 -1.4143999665975571e-02 + <_> + + 0 -1 2742 -3.7937998771667480e-02 + + 7.9841297864913940e-01 -3.3799000084400177e-02 + <_> + + 0 -1 2743 4.7059999778866768e-03 + + -3.3163401484489441e-01 2.0726299285888672e-01 + <_> + + 0 -1 2744 -4.4499998912215233e-03 + + -2.7256301045417786e-01 1.8402199447154999e-01 + <_> + + 0 -1 2745 5.2189999260008335e-03 + + -5.3096002340316772e-01 5.2607998251914978e-02 + <_> + + 0 -1 2746 -9.5399999991059303e-03 + + -5.6485402584075928e-01 1.9269399344921112e-01 + <_> + + 0 -1 2747 4.4969998300075531e-02 + + -1.7411500215530396e-01 9.5382601022720337e-01 + <_> + + 0 -1 2748 1.4209000393748283e-02 + + -9.1949000954627991e-02 2.4836100637912750e-01 + <_> + + 0 -1 2749 1.6380199790000916e-01 + + -5.8497000485658646e-02 -1.6404409408569336e+00 + <_> + + 0 -1 2750 2.5579999200999737e-03 + + 2.3447999358177185e-01 -9.2734001576900482e-02 + <_> + + 0 -1 2751 -3.8499999791383743e-03 + + 1.7880700528621674e-01 -3.5844099521636963e-01 + <_> + + 0 -1 2752 -2.5221999734640121e-02 + + -4.2903000116348267e-01 2.0244500041007996e-01 + <_> + + 0 -1 2753 -1.9415000453591347e-02 + + 5.8016300201416016e-01 -1.8806399405002594e-01 + <_> + + 0 -1 2754 1.4419999904930592e-02 + + 3.2846998423337936e-02 8.1980502605438232e-01 + <_> + + 0 -1 2755 5.1582999527454376e-02 + + 6.9176003336906433e-02 -4.5866298675537109e-01 + <_> + + 0 -1 2756 -3.7960000336170197e-02 + + -1.2553000450134277e+00 1.4332899451255798e-01 + <_> + + 0 -1 2757 -2.9560999944806099e-02 + + 5.3151798248291016e-01 -2.0596499741077423e-01 + <_> + + 0 -1 2758 -3.9110999554395676e-02 + + 1.1658719778060913e+00 5.3897000849246979e-02 + <_> + + 0 -1 2759 -2.9159000143408775e-02 + + 3.9307600259780884e-01 -2.2184500098228455e-01 + <_> + + 0 -1 2760 -8.3617001771926880e-02 + + -7.3744499683380127e-01 1.4268200099468231e-01 + <_> + + 0 -1 2761 4.2004001140594482e-01 + + -1.4277400076389313e-01 1.7894840240478516e+00 + <_> + + 0 -1 2762 6.0005001723766327e-02 + + 1.1976700276136398e-01 -1.8886189460754395e+00 + <_> + + 0 -1 2763 -1.8981000408530235e-02 + + -1.4148449897766113e+00 -5.6522998958826065e-02 + <_> + + 0 -1 2764 -6.0049998573958874e-03 + + 4.4170799851417542e-01 -1.0200800001621246e-01 + <_> + + 0 -1 2765 -5.8214001357555389e-02 + + -1.3918470144271851e+00 -4.8268999904394150e-02 + <_> + + 0 -1 2766 -1.2271000072360039e-02 + + 5.1317697763442993e-01 -9.3696996569633484e-02 + <_> + + 0 -1 2767 4.6585999429225922e-02 + + -5.7484000921249390e-02 -1.4283169507980347e+00 + <_> + + 0 -1 2768 1.2110000243410468e-03 + + -8.0891996622085571e-02 3.2333201169967651e-01 + <_> + + 0 -1 2769 -8.8642001152038574e-02 + + -8.6449098587036133e-01 -3.3146999776363373e-02 + <_> + + 0 -1 2770 -2.3184999823570251e-02 + + 5.2162200212478638e-01 
-1.6168000176548958e-02 + <_> + + 0 -1 2771 4.3090000748634338e-02 + + -1.6153800487518311e-01 1.0915000438690186e+00 + <_> + + 0 -1 2772 2.0599999697878957e-04 + + -1.7091499269008636e-01 3.1236699223518372e-01 + <_> + + 0 -1 2773 8.9159999042749405e-03 + + -6.7039998248219490e-03 -6.8810397386550903e-01 + <_> + + 0 -1 2774 -1.7752999439835548e-02 + + 6.3292801380157471e-01 -4.2360001243650913e-03 + <_> + + 0 -1 2775 6.2299999408423901e-03 + + -3.3637198805809021e-01 1.2790599465370178e-01 + <_> + + 0 -1 2776 2.2770000621676445e-02 + + -3.4703999757766724e-02 3.9141800999641418e-01 + <_> + + 0 -1 2777 -2.1534999832510948e-02 + + 6.4765101671218872e-01 -2.0097799599170685e-01 + <_> + + 0 -1 2778 6.1758998781442642e-02 + + 5.4297000169754028e-02 9.0700101852416992e-01 + <_> + + 0 -1 2779 -7.8069999814033508e-02 + + 6.5523397922515869e-01 -1.9754399359226227e-01 + <_> + + 0 -1 2780 1.1315000243484974e-02 + + 1.9385300576686859e-01 -5.1707297563552856e-01 + <_> + + 0 -1 2781 -2.5590000674128532e-02 + + -9.3096500635147095e-01 -3.1546998769044876e-02 + <_> + + 0 -1 2782 -3.8058999925851822e-02 + + -6.8326902389526367e-01 1.2709100544452667e-01 + <_> + + 0 -1 2783 9.7970003262162209e-03 + + 1.5523999929428101e-02 -6.3347899913787842e-01 + <_> + + 0 -1 2784 -1.3841999694705009e-02 + + 1.0060529708862305e+00 6.2812998890876770e-02 + <_> + + 0 -1 2785 8.3459997549653053e-03 + + -2.3383200168609619e-01 3.0982699990272522e-01 + <_> + + 0 -1 2786 -7.1439996361732483e-02 + + -7.2505402565002441e-01 1.7148299515247345e-01 + <_> + + 0 -1 2787 1.0006000287830830e-02 + + -2.2071999311447144e-01 3.5266199707984924e-01 + <_> + + 0 -1 2788 1.1005300283432007e-01 + + 1.6662000119686127e-01 -7.4318999052047729e-01 + <_> + + 0 -1 2789 3.5310998558998108e-02 + + -2.3982700705528259e-01 4.1435998678207397e-01 + <_> + + 0 -1 2790 -1.1174699664115906e-01 + + 5.1045399904251099e-01 2.2319999989122152e-03 + <_> + + 0 -1 2791 -1.1367800086736679e-01 + + 9.0475201606750488e-01 -1.6615299880504608e-01 + <_> + + 0 -1 2792 1.6667999327182770e-02 + + 1.4024500548839569e-01 -5.2178502082824707e-01 + <_> + + 0 -1 2793 -8.0340001732110977e-03 + + -6.6178399324417114e-01 3.7640000227838755e-03 + <_> + + 0 -1 2794 -3.3096998929977417e-02 + + 8.0185902118682861e-01 5.9385001659393311e-02 + <_> + + 0 -1 2795 1.2547999620437622e-02 + + -3.3545500040054321e-01 1.4578600227832794e-01 + <_> + + 0 -1 2796 -4.2073998600244522e-02 + + -5.5509102344512939e-01 1.3266600668430328e-01 + <_> + + 0 -1 2797 2.5221999734640121e-02 + + -6.1631999909877777e-02 -1.3678770065307617e+00 + <_> + + 0 -1 2798 -2.4268999695777893e-02 + + 3.4185099601745605e-01 -7.4160001240670681e-03 + <_> + + 0 -1 2799 -1.2280000373721123e-02 + + 2.7745801210403442e-01 -3.1033900380134583e-01 + <_> + + 0 -1 2800 -1.1377099901437759e-01 + + 1.1719540357589722e+00 8.3681002259254456e-02 + <_> + + 0 -1 2801 -8.4771998226642609e-02 + + 8.1694799661636353e-01 -1.7837500572204590e-01 + <_> + + 0 -1 2802 -2.4552000686526299e-02 + + -1.8627299368381500e-01 1.4340099692344666e-01 + <_> + + 0 -1 2803 -9.0269995853304863e-03 + + 3.2659199833869934e-01 -2.3541299998760223e-01 + <_> + + 0 -1 2804 1.1177999898791313e-02 + + 1.9761200249195099e-01 -2.1701000630855560e-02 + <_> + + 0 -1 2805 -2.9366999864578247e-02 + + -9.3414801359176636e-01 -2.1704999729990959e-02 + <_> + + 0 -1 2806 6.3640000298619270e-03 + + 2.5573000311851501e-02 4.6412798762321472e-01 + <_> + + 0 -1 2807 1.4026000164449215e-02 + + -2.1228599548339844e-01 4.0078800916671753e-01 + <_> + + 0 -1 2808 
-1.3341999612748623e-02 + + 7.4202698469161987e-01 2.9001999646425247e-02 + <_> + + 0 -1 2809 2.8422799706459045e-01 + + -1.9243599474430084e-01 4.3631199002265930e-01 + <_> + + 0 -1 2810 -2.3724000155925751e-01 + + 6.9736397266387939e-01 6.9307997822761536e-02 + <_> + + 0 -1 2811 -1.1169700324535370e-01 + + 3.9147201180458069e-01 -2.0922000706195831e-01 + <_> + + 0 -1 2812 1.2787500023841858e-01 + + -7.2555996477603912e-02 3.6088201403617859e-01 + <_> + + 0 -1 2813 -6.2900997698307037e-02 + + 9.5424997806549072e-01 -1.5402799844741821e-01 + <_> + + 0 -1 2814 1.7439000308513641e-02 + + -5.1134999841451645e-02 2.7750301361083984e-01 + <_> + + 0 -1 2815 1.2319999514147639e-03 + + 7.5627997517585754e-02 -3.6456099152565002e-01 + <_> + + 0 -1 2816 2.7495000511407852e-02 + + 5.1844000816345215e-02 4.1562598943710327e-01 + <_> + + 0 -1 2817 -4.3543998152017593e-02 + + 7.1969997882843018e-01 -1.7132200300693512e-01 + <_> + + 0 -1 2818 1.1025999672710896e-02 + + 1.4354600012302399e-01 -6.5403002500534058e-01 + <_> + + 0 -1 2819 2.0865999162197113e-02 + + 4.0089000016450882e-02 -4.5743298530578613e-01 + <_> + + 0 -1 2820 -2.2304000332951546e-02 + + 5.3855001926422119e-01 7.1662999689579010e-02 + <_> + + 0 -1 2821 3.2492000609636307e-02 + + -4.5991998165845871e-02 -1.0047069787979126e+00 + <_> + + 0 -1 2822 1.2269999831914902e-02 + + 3.4334998577833176e-02 4.2431798577308655e-01 + <_> + + 0 -1 2823 8.3820000290870667e-03 + + -2.5850600004196167e-01 2.6263499259948730e-01 + <_> + + 0 -1 2824 3.7353999912738800e-02 + + 1.5692499279975891e-01 -1.0429090261459351e+00 + <_> + + 0 -1 2825 -1.4111000113189220e-02 + + -7.3177701234817505e-01 -2.0276999101042747e-02 + <_> + + 0 -1 2826 5.7066999375820160e-02 + + 8.3360001444816589e-02 1.5661499500274658e+00 + <_> + + 0 -1 2827 4.9680001102387905e-03 + + -3.5318198800086975e-01 1.4698399603366852e-01 + <_> + + 0 -1 2828 -2.4492999538779259e-02 + + 2.8325900435447693e-01 -3.4640000667423010e-03 + <_> + + 0 -1 2829 -1.1254999786615372e-02 + + -8.4017497301101685e-01 -3.6251999437808990e-02 + <_> + + 0 -1 2830 3.4533001482486725e-02 + + 1.4998500049114227e-01 -8.7367099523544312e-01 + <_> + + 0 -1 2831 2.4303000420331955e-02 + + -1.8787500262260437e-01 5.9483999013900757e-01 + <_> + + 0 -1 2832 -7.8790001571178436e-03 + + 4.4315698742866516e-01 -5.6570999324321747e-02 + <_> + + 0 -1 2833 3.5142000764608383e-02 + + -5.6494999676942825e-02 -1.3617190122604370e+00 + <_> + + 0 -1 2834 4.6259998343884945e-03 + + -3.1161698698997498e-01 2.5447699427604675e-01 + <_> + + 0 -1 2835 -8.3131000399589539e-02 + + 1.6424349546432495e+00 -1.4429399371147156e-01 + <_> + + 0 -1 2836 -1.4015999622642994e-02 + + -7.7819502353668213e-01 1.7173300683498383e-01 + <_> + + 0 -1 2837 1.2450000504031777e-03 + + -2.3191399872303009e-01 2.8527900576591492e-01 + <_> + + 0 -1 2838 -1.6803000122308731e-02 + + -3.5965099930763245e-01 2.0412999391555786e-01 + <_> + + 0 -1 2839 -7.6747998595237732e-02 + + 7.8050500154495239e-01 -1.5612800419330597e-01 + <_> + + 0 -1 2840 -2.3671999573707581e-01 + + 1.1813700199127197e+00 7.8111998736858368e-02 + <_> + + 0 -1 2841 -1.0057400166988373e-01 + + -4.7104099392890930e-01 7.9172998666763306e-02 + <_> + + 0 -1 2842 1.3239999534562230e-03 + + 2.2262699902057648e-01 -3.7099799513816833e-01 + <_> + + 0 -1 2843 2.2152999415993690e-02 + + -3.8649000227451324e-02 -9.2274999618530273e-01 + <_> + + 0 -1 2844 -1.1246199905872345e-01 + + 4.1899600625038147e-01 8.0411002039909363e-02 + <_> + + 0 -1 2845 1.6481000930070877e-02 + + -1.6756699979305267e-01 
7.1842402219772339e-01 + <_> + + 0 -1 2846 6.8113997578620911e-02 + + 1.5719899535179138e-01 -8.7681102752685547e-01 + <_> + + 0 -1 2847 1.6011999920010567e-02 + + -4.1600000113248825e-03 -5.9327799081802368e-01 + <_> + + 0 -1 2848 4.6640001237392426e-03 + + -3.0153999105095863e-02 4.8345300555229187e-01 + <_> + + 0 -1 2849 6.7579997703433037e-03 + + -2.2667400538921356e-01 3.3662301301956177e-01 + <_> + + 0 -1 2850 4.7289999201893806e-03 + + -6.0373999178409576e-02 3.1458100676536560e-01 + <_> + + 0 -1 2851 2.5869999080896378e-03 + + -2.9872599244117737e-01 1.7787499725818634e-01 + <_> + + 0 -1 2852 2.8989999555051327e-03 + + 2.1890200674533844e-01 -2.9567098617553711e-01 + <_> + + 0 -1 2853 -3.0053999274969101e-02 + + 1.2150429487228394e+00 -1.4354999363422394e-01 + <_> + + 0 -1 2854 1.4181000180542469e-02 + + 1.2451999820768833e-02 5.5490100383758545e-01 + <_> + + 0 -1 2855 -6.0527000576257706e-02 + + -1.4933999776840210e+00 -6.5227001905441284e-02 + <_> + + 0 -1 2856 -1.9882999360561371e-02 + + -3.8526400923728943e-01 1.9761200249195099e-01 + <_> + + 0 -1 2857 3.1218999996781349e-02 + + -2.1281200647354126e-01 2.9446500539779663e-01 + <_> + + 0 -1 2858 1.8271999433636665e-02 + + 9.7200000891461968e-04 6.6814202070236206e-01 + <_> + + 0 -1 2859 1.1089999461546540e-03 + + -6.2467902898788452e-01 -1.6599999507889152e-03 + <_> + + 0 -1 2860 -3.6713998764753342e-02 + + -4.2333900928497314e-01 1.2084700167179108e-01 + <_> + + 0 -1 2861 1.2044000439345837e-02 + + 2.5882000103592873e-02 -5.0732398033142090e-01 + <_> + + 0 -1 2862 7.4749000370502472e-02 + + 1.3184699416160583e-01 -2.1739600598812103e-01 + <_> + + 0 -1 2863 -2.3473200201988220e-01 + + 1.1775610446929932e+00 -1.5114699304103851e-01 + <_> + + 0 -1 2864 1.4096499979496002e-01 + + 3.3991001546382904e-02 3.9923098683357239e-01 + <_> + + 0 -1 2865 6.1789997853338718e-03 + + -3.1806701421737671e-01 1.1681699752807617e-01 + <_> + + 0 -1 2866 -5.7216998189687729e-02 + + 8.4399098157882690e-01 8.3889000117778778e-02 + <_> + + 0 -1 2867 -5.5227000266313553e-02 + + 3.6888301372528076e-01 -1.8913400173187256e-01 + <_> + + 0 -1 2868 -2.1583000198006630e-02 + + -5.2161800861358643e-01 1.5772600471973419e-01 + <_> + + 0 -1 2869 2.5747999548912048e-02 + + -5.9921998530626297e-02 -1.0674990415573120e+00 + <_> + + 0 -1 2870 -1.3098999857902527e-02 + + 7.8958398103713989e-01 5.2099999040365219e-02 + <_> + + 0 -1 2871 2.2799998987466097e-03 + + -1.1704430580139160e+00 -5.9356998652219772e-02 + <_> + + 0 -1 2872 8.8060004636645317e-03 + + 4.1717998683452606e-02 6.6352599859237671e-01 + <_> + + 0 -1 2873 -8.9699998497962952e-03 + + -3.5862699151039124e-01 6.0458000749349594e-02 + <_> + + 0 -1 2874 4.0230001322925091e-03 + + 2.0979399979114532e-01 -2.4806000292301178e-01 + <_> + + 0 -1 2875 2.5017000734806061e-02 + + -1.8795900046825409e-01 3.9547100663185120e-01 + <_> + + 0 -1 2876 -5.9009999968111515e-03 + + 2.5663900375366211e-01 -9.4919003546237946e-02 + <_> + + 0 -1 2877 4.3850000947713852e-03 + + 3.3139001578092575e-02 -4.6075400710105896e-01 + <_> + + 0 -1 2878 -3.3771999180316925e-02 + + -9.8881602287292480e-01 1.4636899530887604e-01 + <_> + + 0 -1 2879 4.4523000717163086e-02 + + -1.3286699354648590e-01 1.5796790122985840e+00 + <_> + + 0 -1 2880 -4.0929000824689865e-02 + + 3.3877098560333252e-01 7.4970997869968414e-02 + <_> + + 0 -1 2881 3.9351999759674072e-02 + + -1.8327899277210236e-01 4.6980699896812439e-01 + <_> + + 0 -1 2882 -7.0322997868061066e-02 + + -9.8322701454162598e-01 1.1808100342750549e-01 + <_> + + 0 -1 2883 
3.5743001848459244e-02 + + -3.3050999045372009e-02 -8.3610898256301880e-01 + <_> + + 0 -1 2884 -4.2961999773979187e-02 + + 1.1670809984207153e+00 8.0692000687122345e-02 + <_> + + 0 -1 2885 -2.1007999777793884e-02 + + 6.3869798183441162e-01 -1.7626300454139709e-01 + <_> + + 0 -1 2886 -1.5742200613021851e-01 + + -2.3302499949932098e-01 1.2517499923706055e-01 + <_> + + 0 -1 2887 7.8659998252987862e-03 + + -2.2037999331951141e-01 2.7196800708770752e-01 + <_> + + 0 -1 2888 2.3622000589966774e-02 + + 1.6127300262451172e-01 -4.3329000473022461e-01 + <_> + + 0 -1 2889 7.4692003428936005e-02 + + -1.6991999745368958e-01 5.8884900808334351e-01 + <_> + + 0 -1 2890 -6.4799998654052615e-04 + + 2.5842899084091187e-01 -3.5911999642848969e-02 + <_> + + 0 -1 2891 -1.6290999948978424e-02 + + -7.6764398813247681e-01 -2.0472999662160873e-02 + <_> + + 0 -1 2892 -3.3133998513221741e-02 + + -2.7180099487304688e-01 1.4325700700283051e-01 + <_> + + 0 -1 2893 4.8797998577356339e-02 + + 7.6408997178077698e-02 -4.1445198655128479e-01 + <_> + + 0 -1 2894 2.2869999520480633e-03 + + -3.8628999143838882e-02 2.0753799378871918e-01 + <_> + + 0 -1 2895 4.5304000377655029e-02 + + -1.7777900397777557e-01 6.3461399078369141e-01 + <_> + + 0 -1 2896 1.0705800354480743e-01 + + 1.8972299993038177e-01 -5.1236200332641602e-01 + <_> + + 0 -1 2897 -4.0525000542402267e-02 + + 7.0614999532699585e-01 -1.7803299427032471e-01 + <_> + + 0 -1 2898 3.1968999654054642e-02 + + 6.8149998784065247e-02 6.8733102083206177e-01 + <_> + + 0 -1 2899 -5.7617001235485077e-02 + + 7.5170499086380005e-01 -1.5764999389648438e-01 + <_> + + 0 -1 2900 1.3593999668955803e-02 + + 1.9411900639533997e-01 -2.4561899900436401e-01 + <_> + + 0 -1 2901 7.1396000683307648e-02 + + -4.6881001442670822e-02 -8.8198298215866089e-01 + <_> + + 0 -1 2902 -1.4895999804139137e-02 + + -4.4532400369644165e-01 1.7679899930953979e-01 + <_> + + 0 -1 2903 -1.0026000440120697e-02 + + 6.5122699737548828e-01 -1.6709999740123749e-01 + <_> + + 0 -1 2904 3.7589999847114086e-03 + + -5.8301001787185669e-02 3.4483298659324646e-01 + <_> + + 0 -1 2905 1.6263000667095184e-02 + + -1.5581500530242920e-01 8.6432701349258423e-01 + <_> + + 0 -1 2906 -4.0176000446081161e-02 + + -6.1028599739074707e-01 1.1796399950981140e-01 + <_> + + 0 -1 2907 2.7080999687314034e-02 + + -4.9601998180150986e-02 -8.9990001916885376e-01 + <_> + + 0 -1 2908 5.2420001477003098e-02 + + 1.1297199875116348e-01 -1.0833640098571777e+00 + <_> + + 0 -1 2909 -1.9160000607371330e-02 + + -7.9880100488662720e-01 -3.4079000353813171e-02 + <_> + + 0 -1 2910 -3.7730000913143158e-03 + + -1.9124099612236023e-01 2.1535199880599976e-01 + <_> + + 0 -1 2911 7.5762003660202026e-02 + + -1.3421699404716492e-01 1.6807060241699219e+00 + <_> + + 0 -1 2912 -2.2173000499606133e-02 + + 4.8600998520851135e-01 3.6160000599920750e-03 + + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 4 12 7 -1. + <_> + 10 4 4 7 3. + <_> + + <_> + 3 9 18 9 -1. + <_> + 3 12 18 3 3. + <_> + + <_> + 8 18 9 6 -1. + <_> + 8 20 9 2 3. + <_> + + <_> + 3 5 4 19 -1. + <_> + 5 5 2 19 2. + <_> + + <_> + 6 5 12 16 -1. + <_> + 6 13 12 8 2. + <_> + + <_> + 5 8 12 6 -1. + <_> + 5 11 12 3 2. + <_> + + <_> + 11 14 4 10 -1. + <_> + 11 19 4 5 2. + <_> + + <_> + 4 0 7 6 -1. + <_> + 4 3 7 3 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 8 12 2 3. + <_> + + <_> + 6 4 12 7 -1. + <_> + 10 4 4 7 3. + <_> + + <_> + 1 8 19 12 -1. + <_> + 1 12 19 4 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 8 2 8 3 3. + <_> + + <_> + 9 9 6 15 -1. + <_> + 9 14 6 5 3. + <_> + + <_> + 5 6 14 10 -1. 
+ <_> + 5 11 14 5 2. + <_> + + <_> + 5 0 14 9 -1. + <_> + 5 3 14 3 3. + <_> + + <_> + 13 11 9 6 -1. + <_> + 16 11 3 6 3. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 10 8 6 10 -1. + <_> + 12 8 2 10 3. + <_> + + <_> + 2 5 4 9 -1. + <_> + 4 5 2 9 2. + <_> + + <_> + 18 0 6 11 -1. + <_> + 20 0 2 11 3. + <_> + + <_> + 0 6 24 13 -1. + <_> + 8 6 8 13 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 7 18 10 6 -1. + <_> + 7 20 10 2 3. + <_> + + <_> + 5 7 14 12 -1. + <_> + 5 13 14 6 2. + <_> + + <_> + 0 3 24 3 -1. + <_> + 8 3 8 3 3. + <_> + + <_> + 5 8 15 6 -1. + <_> + 5 11 15 3 2. + <_> + + <_> + 9 6 5 14 -1. + <_> + 9 13 5 7 2. + <_> + + <_> + 9 5 6 10 -1. + <_> + 11 5 2 10 3. + <_> + + <_> + 6 6 3 12 -1. + <_> + 6 12 3 6 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 5 6 13 6 -1. + <_> + 5 8 13 2 3. + <_> + + <_> + 18 1 6 15 -1. + <_> + 18 1 3 15 2. + <_> + + <_> + 1 1 6 15 -1. + <_> + 4 1 3 15 2. + <_> + + <_> + 0 8 24 15 -1. + <_> + 8 8 8 15 3. + <_> + + <_> + 5 6 14 12 -1. + <_> + 5 6 7 6 2. + <_> + 12 12 7 6 2. + <_> + + <_> + 2 12 21 12 -1. + <_> + 2 16 21 4 3. + <_> + + <_> + 8 1 4 10 -1. + <_> + 10 1 2 10 2. + <_> + + <_> + 2 13 20 10 -1. + <_> + 2 13 10 10 2. + <_> + + <_> + 0 1 6 13 -1. + <_> + 2 1 2 13 3. + <_> + + <_> + 20 2 4 13 -1. + <_> + 20 2 2 13 2. + <_> + + <_> + 0 5 22 19 -1. + <_> + 11 5 11 19 2. + <_> + + <_> + 18 4 6 9 -1. + <_> + 20 4 2 9 3. + <_> + + <_> + 0 3 6 11 -1. + <_> + 2 3 2 11 3. + <_> + + <_> + 12 1 4 9 -1. + <_> + 12 1 2 9 2. + <_> + + <_> + 0 6 19 3 -1. + <_> + 0 7 19 1 3. + <_> + + <_> + 12 1 4 9 -1. + <_> + 12 1 2 9 2. + <_> + + <_> + 8 1 4 9 -1. + <_> + 10 1 2 9 2. + <_> + + <_> + 5 5 14 14 -1. + <_> + 12 5 7 7 2. + <_> + 5 12 7 7 2. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 17 13 4 11 -1. + <_> + 17 13 2 11 2. + <_> + + <_> + 0 4 6 9 -1. + <_> + 0 7 6 3 3. + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 5 12 6 -1. + <_> + 10 5 4 6 3. + <_> + + <_> + 0 1 24 5 -1. + <_> + 8 1 8 5 3. + <_> + + <_> + 4 10 18 6 -1. + <_> + 4 12 18 2 3. + <_> + + <_> + 2 17 12 6 -1. + <_> + 2 17 6 3 2. + <_> + 8 20 6 3 2. + <_> + + <_> + 19 3 4 13 -1. + <_> + 19 3 2 13 2. + <_> + + <_> + 1 3 4 13 -1. + <_> + 3 3 2 13 2. + <_> + + <_> + 0 1 24 23 -1. + <_> + 8 1 8 23 3. + <_> + + <_> + 1 7 8 12 -1. + <_> + 1 11 8 4 3. + <_> + + <_> + 14 7 3 14 -1. + <_> + 14 14 3 7 2. + <_> + + <_> + 3 12 16 6 -1. + <_> + 3 12 8 3 2. + <_> + 11 15 8 3 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 8 12 2 3. + <_> + + <_> + 8 7 6 12 -1. + <_> + 8 13 6 6 2. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 4 4 16 12 -1. + <_> + 4 10 16 6 2. + <_> + + <_> + 0 1 4 20 -1. + <_> + 2 1 2 20 2. + <_> + + <_> + 3 0 18 2 -1. + <_> + 3 1 18 1 2. + <_> + + <_> + 1 5 20 14 -1. + <_> + 1 5 10 7 2. + <_> + 11 12 10 7 2. + <_> + + <_> + 5 8 14 12 -1. + <_> + 5 12 14 4 3. + <_> + + <_> + 3 14 7 9 -1. + <_> + 3 17 7 3 3. + <_> + + <_> + 14 15 9 6 -1. + <_> + 14 17 9 2 3. + <_> + + <_> + 1 15 9 6 -1. + <_> + 1 17 9 2 3. + <_> + + <_> + 11 6 8 10 -1. + <_> + 15 6 4 5 2. + <_> + 11 11 4 5 2. + <_> + + <_> + 5 5 14 14 -1. + <_> + 5 5 7 7 2. + <_> + 12 12 7 7 2. + <_> + + <_> + 6 0 12 5 -1. + <_> + 10 0 4 5 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 9 3 6 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. 
+ <_> + + <_> + 3 8 18 4 -1. + <_> + 9 8 6 4 3. + <_> + + <_> + 6 0 12 9 -1. + <_> + 6 3 12 3 3. + <_> + + <_> + 0 0 24 6 -1. + <_> + 8 0 8 6 3. + <_> + + <_> + 4 7 16 12 -1. + <_> + 4 11 16 4 3. + <_> + + <_> + 11 6 6 6 -1. + <_> + 11 6 3 6 2. + <_> + + <_> + 0 20 24 3 -1. + <_> + 8 20 8 3 3. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 4 13 15 4 -1. + <_> + 9 13 5 4 3. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 9 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 18 6 6 2. + <_> + + <_> + 1 22 18 2 -1. + <_> + 1 23 18 1 2. + <_> + + <_> + 10 7 4 10 -1. + <_> + 10 12 4 5 2. + <_> + + <_> + 6 7 8 10 -1. + <_> + 6 12 8 5 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 0 14 10 4 -1. + <_> + 0 16 10 2 2. + <_> + + <_> + 6 18 18 2 -1. + <_> + 6 19 18 1 2. + <_> + + <_> + 1 1 22 3 -1. + <_> + 1 2 22 1 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 2 4 6 15 -1. + <_> + 5 4 3 15 2. + <_> + + <_> + 20 4 4 10 -1. + <_> + 20 4 2 10 2. + <_> + + <_> + 0 4 4 10 -1. + <_> + 2 4 2 10 2. + <_> + + <_> + 2 16 20 6 -1. + <_> + 12 16 10 3 2. + <_> + 2 19 10 3 2. + <_> + + <_> + 0 12 8 9 -1. + <_> + 4 12 4 9 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 5 10 6 6 -1. + <_> + 8 10 3 6 2. + <_> + + <_> + 11 8 12 6 -1. + <_> + 17 8 6 3 2. + <_> + 11 11 6 3 2. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 10 8 6 10 -1. + <_> + 12 8 2 10 3. + <_> + + <_> + 3 19 12 3 -1. + <_> + 9 19 6 3 2. + <_> + + <_> + 2 10 20 2 -1. + <_> + 2 11 20 1 2. + <_> + + <_> + 2 9 18 12 -1. + <_> + 2 9 9 6 2. + <_> + 11 15 9 6 2. + <_> + + <_> + 3 0 18 24 -1. + <_> + 3 0 9 24 2. + <_> + + <_> + 5 6 14 10 -1. + <_> + 5 6 7 5 2. + <_> + 12 11 7 5 2. + <_> + + <_> + 9 5 10 12 -1. + <_> + 14 5 5 6 2. + <_> + 9 11 5 6 2. + <_> + + <_> + 4 5 12 12 -1. + <_> + 4 5 6 6 2. + <_> + 10 11 6 6 2. + <_> + + <_> + 4 14 18 3 -1. + <_> + 4 15 18 1 3. + <_> + + <_> + 6 13 8 8 -1. + <_> + 6 17 8 4 2. + <_> + + <_> + 3 16 18 6 -1. + <_> + 3 19 18 3 2. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 6 6 12 18 -1. + <_> + 10 6 4 18 3. + <_> + + <_> + 6 1 4 14 -1. + <_> + 8 1 2 14 2. + <_> + + <_> + 3 2 19 2 -1. + <_> + 3 3 19 1 2. + <_> + + <_> + 1 8 22 13 -1. + <_> + 12 8 11 13 2. + <_> + + <_> + 8 9 11 4 -1. + <_> + 8 11 11 2 2. + <_> + + <_> + 0 12 15 10 -1. + <_> + 5 12 5 10 3. + <_> + + <_> + 12 16 12 6 -1. + <_> + 16 16 4 6 3. + <_> + + <_> + 0 16 12 6 -1. + <_> + 4 16 4 6 3. + <_> + + <_> + 19 1 5 12 -1. + <_> + 19 5 5 4 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 6 8 12 4 -1. + <_> + 6 10 12 2 2. + <_> + + <_> + 7 5 9 6 -1. + <_> + 10 5 3 6 3. + <_> + + <_> + 9 17 6 6 -1. + <_> + 9 20 6 3 2. + <_> + + <_> + 0 7 22 15 -1. + <_> + 0 12 22 5 3. + <_> + + <_> + 4 1 17 9 -1. + <_> + 4 4 17 3 3. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 18 1 6 8 -1. + <_> + 18 1 3 8 2. + <_> + + <_> + 0 1 6 7 -1. + <_> + 3 1 3 7 2. + <_> + + <_> + 18 0 6 22 -1. + <_> + 18 0 3 22 2. + <_> + + <_> + 0 0 6 22 -1. + <_> + 3 0 3 22 2. + <_> + + <_> + 16 7 8 16 -1. + <_> + 16 7 4 16 2. + <_> + + <_> + 2 10 19 6 -1. + <_> + 2 12 19 2 3. + <_> + + <_> + 9 9 6 12 -1. + <_> + 9 13 6 4 3. + <_> + + <_> + 2 15 17 6 -1. + <_> + 2 17 17 2 3. + <_> + + <_> + 14 7 3 14 -1. 
+ <_> + 14 14 3 7 2. + <_> + + <_> + 5 6 8 10 -1. + <_> + 5 6 4 5 2. + <_> + 9 11 4 5 2. + <_> + + <_> + 15 8 9 11 -1. + <_> + 18 8 3 11 3. + <_> + + <_> + 0 8 9 11 -1. + <_> + 3 8 3 11 3. + <_> + + <_> + 8 6 10 18 -1. + <_> + 8 15 10 9 2. + <_> + + <_> + 7 7 3 14 -1. + <_> + 7 14 3 7 2. + <_> + + <_> + 0 14 24 8 -1. + <_> + 8 14 8 8 3. + <_> + + <_> + 1 10 18 14 -1. + <_> + 10 10 9 14 2. + <_> + + <_> + 14 12 6 6 -1. + <_> + 14 15 6 3 2. + <_> + + <_> + 7 0 10 16 -1. + <_> + 7 0 5 8 2. + <_> + 12 8 5 8 2. + <_> + + <_> + 10 0 9 6 -1. + <_> + 13 0 3 6 3. + <_> + + <_> + 4 3 16 4 -1. + <_> + 12 3 8 4 2. + <_> + + <_> + 10 0 9 6 -1. + <_> + 13 0 3 6 3. + <_> + + <_> + 1 1 20 4 -1. + <_> + 1 1 10 2 2. + <_> + 11 3 10 2 2. + <_> + + <_> + 10 0 9 6 -1. + <_> + 13 0 3 6 3. + <_> + + <_> + 5 0 9 6 -1. + <_> + 8 0 3 6 3. + <_> + + <_> + 8 18 10 6 -1. + <_> + 8 20 10 2 3. + <_> + + <_> + 6 3 6 9 -1. + <_> + 8 3 2 9 3. + <_> + + <_> + 7 3 12 6 -1. + <_> + 7 5 12 2 3. + <_> + + <_> + 0 10 18 3 -1. + <_> + 0 11 18 1 3. + <_> + + <_> + 1 10 22 3 -1. + <_> + 1 11 22 1 3. + <_> + + <_> + 5 11 8 8 -1. + <_> + 9 11 4 8 2. + <_> + + <_> + 12 11 6 6 -1. + <_> + 12 11 3 6 2. + <_> + + <_> + 6 11 6 6 -1. + <_> + 9 11 3 6 2. + <_> + + <_> + 7 10 11 6 -1. + <_> + 7 12 11 2 3. + <_> + + <_> + 0 13 24 4 -1. + <_> + 0 13 12 2 2. + <_> + 12 15 12 2 2. + <_> + + <_> + 2 4 22 12 -1. + <_> + 13 4 11 6 2. + <_> + 2 10 11 6 2. + <_> + + <_> + 2 0 20 17 -1. + <_> + 12 0 10 17 2. + <_> + + <_> + 14 0 2 24 -1. + <_> + 14 0 1 24 2. + <_> + + <_> + 8 0 2 24 -1. + <_> + 9 0 1 24 2. + <_> + + <_> + 14 1 2 22 -1. + <_> + 14 1 1 22 2. + <_> + + <_> + 8 1 2 22 -1. + <_> + 9 1 1 22 2. + <_> + + <_> + 17 6 3 18 -1. + <_> + 18 6 1 18 3. + <_> + + <_> + 6 14 9 6 -1. + <_> + 6 16 9 2 3. + <_> + + <_> + 13 14 9 4 -1. + <_> + 13 16 9 2 2. + <_> + + <_> + 3 18 18 3 -1. + <_> + 3 19 18 1 3. + <_> + + <_> + 9 4 8 18 -1. + <_> + 13 4 4 9 2. + <_> + 9 13 4 9 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 0 2 12 4 -1. + <_> + 6 2 6 4 2. + <_> + + <_> + 6 8 14 6 -1. + <_> + 6 11 14 3 2. + <_> + + <_> + 7 5 6 6 -1. + <_> + 10 5 3 6 2. + <_> + + <_> + 10 5 6 16 -1. + <_> + 10 13 6 8 2. + <_> + + <_> + 1 4 9 16 -1. + <_> + 4 4 3 16 3. + <_> + + <_> + 5 0 18 9 -1. + <_> + 5 3 18 3 3. + <_> + + <_> + 9 15 5 8 -1. + <_> + 9 19 5 4 2. + <_> + + <_> + 20 0 4 9 -1. + <_> + 20 0 2 9 2. + <_> + + <_> + 2 0 18 3 -1. + <_> + 2 1 18 1 3. + <_> + + <_> + 5 22 19 2 -1. + <_> + 5 23 19 1 2. + <_> + + <_> + 0 0 4 9 -1. + <_> + 2 0 2 9 2. + <_> + + <_> + 5 6 19 18 -1. + <_> + 5 12 19 6 3. + <_> + + <_> + 0 1 6 9 -1. + <_> + 2 1 2 9 3. + <_> + + <_> + 6 5 14 12 -1. + <_> + 13 5 7 6 2. + <_> + 6 11 7 6 2. + <_> + + <_> + 0 1 20 2 -1. + <_> + 0 2 20 1 2. + <_> + + <_> + 1 2 22 3 -1. + <_> + 1 3 22 1 3. + <_> + + <_> + 2 8 7 9 -1. + <_> + 2 11 7 3 3. + <_> + + <_> + 2 12 22 4 -1. + <_> + 13 12 11 2 2. + <_> + 2 14 11 2 2. + <_> + + <_> + 0 12 22 4 -1. + <_> + 0 12 11 2 2. + <_> + 11 14 11 2 2. + <_> + + <_> + 9 7 6 11 -1. + <_> + 11 7 2 11 3. + <_> + + <_> + 7 1 9 6 -1. + <_> + 10 1 3 6 3. + <_> + + <_> + 11 2 4 10 -1. + <_> + 11 7 4 5 2. + <_> + + <_> + 6 4 12 12 -1. + <_> + 6 10 12 6 2. + <_> + + <_> + 18 1 6 15 -1. + <_> + 18 6 6 5 3. + <_> + + <_> + 3 15 18 3 -1. + <_> + 3 16 18 1 3. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 1 5 16 6 -1. + <_> + 1 5 8 3 2. + <_> + 9 8 8 3 2. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 0 4 24 14 -1. + <_> + 0 4 12 7 2. + <_> + 12 11 12 7 2. 
+ <_> + + <_> + 13 0 4 13 -1. + <_> + 13 0 2 13 2. + <_> + + <_> + 7 0 4 13 -1. + <_> + 9 0 2 13 2. + <_> + + <_> + 11 6 6 9 -1. + <_> + 13 6 2 9 3. + <_> + + <_> + 8 7 6 9 -1. + <_> + 10 7 2 9 3. + <_> + + <_> + 13 17 9 6 -1. + <_> + 13 19 9 2 3. + <_> + + <_> + 2 18 14 6 -1. + <_> + 2 18 7 3 2. + <_> + 9 21 7 3 2. + <_> + + <_> + 3 18 18 4 -1. + <_> + 12 18 9 2 2. + <_> + 3 20 9 2 2. + <_> + + <_> + 0 20 15 4 -1. + <_> + 5 20 5 4 3. + <_> + + <_> + 9 15 15 9 -1. + <_> + 14 15 5 9 3. + <_> + + <_> + 4 4 16 4 -1. + <_> + 4 6 16 2 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 0 14 15 10 -1. + <_> + 5 14 5 10 3. + <_> + + <_> + 7 9 10 14 -1. + <_> + 12 9 5 7 2. + <_> + 7 16 5 7 2. + <_> + + <_> + 7 6 6 9 -1. + <_> + 9 6 2 9 3. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 0 10 18 3 -1. + <_> + 0 11 18 1 3. + <_> + + <_> + 3 16 18 4 -1. + <_> + 12 16 9 2 2. + <_> + 3 18 9 2 2. + <_> + + <_> + 4 6 14 6 -1. + <_> + 4 6 7 3 2. + <_> + 11 9 7 3 2. + <_> + + <_> + 13 0 2 18 -1. + <_> + 13 0 1 18 2. + <_> + + <_> + 9 0 2 18 -1. + <_> + 10 0 1 18 2. + <_> + + <_> + 5 7 15 10 -1. + <_> + 10 7 5 10 3. + <_> + + <_> + 1 20 21 4 -1. + <_> + 8 20 7 4 3. + <_> + + <_> + 10 5 5 18 -1. + <_> + 10 14 5 9 2. + <_> + + <_> + 0 2 24 6 -1. + <_> + 0 2 12 3 2. + <_> + 12 5 12 3 2. + <_> + + <_> + 1 1 22 8 -1. + <_> + 12 1 11 4 2. + <_> + 1 5 11 4 2. + <_> + + <_> + 4 0 15 9 -1. + <_> + 4 3 15 3 3. + <_> + + <_> + 0 0 24 19 -1. + <_> + 8 0 8 19 3. + <_> + + <_> + 2 21 18 3 -1. + <_> + 11 21 9 3 2. + <_> + + <_> + 9 7 10 4 -1. + <_> + 9 7 5 4 2. + <_> + + <_> + 5 7 10 4 -1. + <_> + 10 7 5 4 2. + <_> + + <_> + 17 8 6 16 -1. + <_> + 20 8 3 8 2. + <_> + 17 16 3 8 2. + <_> + + <_> + 1 15 20 4 -1. + <_> + 1 15 10 2 2. + <_> + 11 17 10 2 2. + <_> + + <_> + 14 15 10 6 -1. + <_> + 14 17 10 2 3. + <_> + + <_> + 3 0 16 9 -1. + <_> + 3 3 16 3 3. + <_> + + <_> + 15 6 7 15 -1. + <_> + 15 11 7 5 3. + <_> + + <_> + 9 1 6 13 -1. + <_> + 11 1 2 13 3. + <_> + + <_> + 17 2 6 14 -1. + <_> + 17 2 3 14 2. + <_> + + <_> + 3 14 12 10 -1. + <_> + 3 14 6 5 2. + <_> + 9 19 6 5 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 1 2 6 14 -1. + <_> + 4 2 3 14 2. + <_> + + <_> + 10 4 5 12 -1. + <_> + 10 8 5 4 3. + <_> + + <_> + 0 17 24 5 -1. + <_> + 8 17 8 5 3. + <_> + + <_> + 15 7 5 12 -1. + <_> + 15 11 5 4 3. + <_> + + <_> + 3 1 6 12 -1. + <_> + 3 1 3 6 2. + <_> + 6 7 3 6 2. + <_> + + <_> + 12 13 6 6 -1. + <_> + 12 16 6 3 2. + <_> + + <_> + 6 13 6 6 -1. + <_> + 6 16 6 3 2. + <_> + + <_> + 14 6 3 16 -1. + <_> + 14 14 3 8 2. + <_> + + <_> + 1 12 13 6 -1. + <_> + 1 14 13 2 3. + <_> + + <_> + 13 1 4 9 -1. + <_> + 13 1 2 9 2. + <_> + + <_> + 7 0 9 6 -1. + <_> + 10 0 3 6 3. + <_> + + <_> + 12 2 6 9 -1. + <_> + 12 2 3 9 2. + <_> + + <_> + 6 2 6 9 -1. + <_> + 9 2 3 9 2. + <_> + + <_> + 6 18 12 6 -1. + <_> + 6 20 12 2 3. + <_> + + <_> + 7 6 6 9 -1. + <_> + 9 6 2 9 3. + <_> + + <_> + 7 7 12 3 -1. + <_> + 7 7 6 3 2. + <_> + + <_> + 8 3 8 21 -1. + <_> + 8 10 8 7 3. + <_> + + <_> + 7 4 10 12 -1. + <_> + 7 8 10 4 3. + <_> + + <_> + 0 1 6 9 -1. + <_> + 0 4 6 3 3. + <_> + + <_> + 15 2 2 20 -1. + <_> + 15 2 1 20 2. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 15 3 2 21 -1. + <_> + 15 3 1 21 2. + <_> + + <_> + 7 0 2 23 -1. + <_> + 8 0 1 23 2. + <_> + + <_> + 15 8 9 4 -1. + <_> + 15 10 9 2 2. + <_> + + <_> + 0 8 9 4 -1. + <_> + 0 10 9 2 2. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 3 10 18 4 -1. + <_> + 9 10 6 4 3. 
+ <_> + + <_> + 0 0 24 19 -1. + <_> + 8 0 8 19 3. + <_> + + <_> + 9 1 8 12 -1. + <_> + 9 7 8 6 2. + <_> + + <_> + 10 6 4 10 -1. + <_> + 12 6 2 10 2. + <_> + + <_> + 7 9 10 12 -1. + <_> + 12 9 5 6 2. + <_> + 7 15 5 6 2. + <_> + + <_> + 5 0 3 19 -1. + <_> + 6 0 1 19 3. + <_> + + <_> + 14 0 6 10 -1. + <_> + 16 0 2 10 3. + <_> + + <_> + 2 0 6 12 -1. + <_> + 2 0 3 6 2. + <_> + 5 6 3 6 2. + <_> + + <_> + 0 11 24 2 -1. + <_> + 0 12 24 1 2. + <_> + + <_> + 4 9 13 4 -1. + <_> + 4 11 13 2 2. + <_> + + <_> + 9 8 6 9 -1. + <_> + 9 11 6 3 3. + <_> + + <_> + 0 12 16 4 -1. + <_> + 0 14 16 2 2. + <_> + + <_> + 18 12 6 9 -1. + <_> + 18 15 6 3 3. + <_> + + <_> + 0 12 6 9 -1. + <_> + 0 15 6 3 3. + <_> + + <_> + 8 7 10 4 -1. + <_> + 8 7 5 4 2. + <_> + + <_> + 8 7 6 9 -1. + <_> + 10 7 2 9 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 12 3 6 15 -1. + <_> + 14 3 2 15 3. + <_> + + <_> + 6 3 6 15 -1. + <_> + 8 3 2 15 3. + <_> + + <_> + 15 2 9 4 -1. + <_> + 15 4 9 2 2. + <_> + + <_> + 5 10 6 7 -1. + <_> + 8 10 3 7 2. + <_> + + <_> + 9 14 6 10 -1. + <_> + 9 19 6 5 2. + <_> + + <_> + 7 13 5 8 -1. + <_> + 7 17 5 4 2. + <_> + + <_> + 14 5 3 16 -1. + <_> + 14 13 3 8 2. + <_> + + <_> + 2 17 18 3 -1. + <_> + 2 18 18 1 3. + <_> + + <_> + 5 18 19 3 -1. + <_> + 5 19 19 1 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 12 4 3 18 -1. + <_> + 13 4 1 18 3. + <_> + + <_> + 9 4 3 18 -1. + <_> + 10 4 1 18 3. + <_> + + <_> + 3 3 18 9 -1. + <_> + 9 3 6 9 3. + <_> + + <_> + 6 1 6 14 -1. + <_> + 8 1 2 14 3. + <_> + + <_> + 12 16 9 6 -1. + <_> + 12 19 9 3 2. + <_> + + <_> + 1 3 20 16 -1. + <_> + 1 3 10 8 2. + <_> + 11 11 10 8 2. + <_> + + <_> + 12 5 6 12 -1. + <_> + 15 5 3 6 2. + <_> + 12 11 3 6 2. + <_> + + <_> + 1 2 22 16 -1. + <_> + 1 2 11 8 2. + <_> + 12 10 11 8 2. + <_> + + <_> + 10 14 5 10 -1. + <_> + 10 19 5 5 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 3 22 18 1 3. + <_> + + <_> + 10 14 6 10 -1. + <_> + 12 14 2 10 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 6 12 5 -1. + <_> + 10 6 4 5 3. + <_> + + <_> + 5 8 14 12 -1. + <_> + 5 12 14 4 3. + <_> + + <_> + 4 14 8 10 -1. + <_> + 4 14 4 5 2. + <_> + 8 19 4 5 2. + <_> + + <_> + 11 6 5 14 -1. + <_> + 11 13 5 7 2. + <_> + + <_> + 7 6 3 16 -1. + <_> + 7 14 3 8 2. + <_> + + <_> + 3 7 18 8 -1. + <_> + 9 7 6 8 3. + <_> + + <_> + 2 3 20 2 -1. + <_> + 2 4 20 1 2. + <_> + + <_> + 3 12 19 6 -1. + <_> + 3 14 19 2 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 16 6 6 14 -1. + <_> + 16 6 3 14 2. + <_> + + <_> + 7 9 6 12 -1. + <_> + 9 9 2 12 3. + <_> + + <_> + 18 6 6 18 -1. + <_> + 21 6 3 9 2. + <_> + 18 15 3 9 2. + <_> + + <_> + 0 6 6 18 -1. + <_> + 0 6 3 9 2. + <_> + 3 15 3 9 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 3 18 15 6 -1. + <_> + 3 20 15 2 3. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 5 10 18 2 -1. + <_> + 5 11 18 1 2. + <_> + + <_> + 6 0 12 6 -1. + <_> + 6 2 12 2 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 3 6 13 6 -1. + <_> + 3 8 13 2 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 2 5 6 15 -1. + <_> + 5 5 3 15 2. + <_> + + <_> + 8 8 9 6 -1. + <_> + 11 8 3 6 3. + <_> + + <_> + 8 6 3 14 -1. + <_> + 8 13 3 7 2. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. 
+ <_> + + <_> + 4 12 10 4 -1. + <_> + 9 12 5 4 2. + <_> + + <_> + 13 1 4 19 -1. + <_> + 13 1 2 19 2. + <_> + + <_> + 7 1 4 19 -1. + <_> + 9 1 2 19 2. + <_> + + <_> + 18 9 6 9 -1. + <_> + 18 12 6 3 3. + <_> + + <_> + 1 21 18 3 -1. + <_> + 1 22 18 1 3. + <_> + + <_> + 14 13 10 9 -1. + <_> + 14 16 10 3 3. + <_> + + <_> + 1 13 22 4 -1. + <_> + 1 13 11 2 2. + <_> + 12 15 11 2 2. + <_> + + <_> + 4 6 16 6 -1. + <_> + 12 6 8 3 2. + <_> + 4 9 8 3 2. + <_> + + <_> + 1 0 18 22 -1. + <_> + 1 0 9 11 2. + <_> + 10 11 9 11 2. + <_> + + <_> + 10 7 8 14 -1. + <_> + 14 7 4 7 2. + <_> + 10 14 4 7 2. + <_> + + <_> + 0 4 6 20 -1. + <_> + 0 4 3 10 2. + <_> + 3 14 3 10 2. + <_> + + <_> + 15 0 6 9 -1. + <_> + 17 0 2 9 3. + <_> + + <_> + 3 0 6 9 -1. + <_> + 5 0 2 9 3. + <_> + + <_> + 15 12 6 12 -1. + <_> + 18 12 3 6 2. + <_> + 15 18 3 6 2. + <_> + + <_> + 3 12 6 12 -1. + <_> + 3 12 3 6 2. + <_> + 6 18 3 6 2. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 0 12 9 6 -1. + <_> + 0 14 9 2 3. + <_> + + <_> + 4 14 19 3 -1. + <_> + 4 15 19 1 3. + <_> + + <_> + 2 13 19 3 -1. + <_> + 2 14 19 1 3. + <_> + + <_> + 14 15 10 6 -1. + <_> + 14 17 10 2 3. + <_> + + <_> + 6 0 10 12 -1. + <_> + 6 0 5 6 2. + <_> + 11 6 5 6 2. + <_> + + <_> + 17 1 6 12 -1. + <_> + 20 1 3 6 2. + <_> + 17 7 3 6 2. + <_> + + <_> + 1 1 6 12 -1. + <_> + 1 1 3 6 2. + <_> + 4 7 3 6 2. + <_> + + <_> + 16 14 6 9 -1. + <_> + 16 17 6 3 3. + <_> + + <_> + 7 3 9 12 -1. + <_> + 7 9 9 6 2. + <_> + + <_> + 12 1 4 12 -1. + <_> + 12 7 4 6 2. + <_> + + <_> + 4 0 14 8 -1. + <_> + 4 4 14 4 2. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 2 10 18 3 -1. + <_> + 8 10 6 3 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 1 21 23 -1. + <_> + 7 1 7 23 3. + <_> + + <_> + 6 9 17 4 -1. + <_> + 6 11 17 2 2. + <_> + + <_> + 1 0 11 18 -1. + <_> + 1 6 11 6 3. + <_> + + <_> + 6 15 13 6 -1. + <_> + 6 17 13 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 8 7 15 4 -1. + <_> + 13 7 5 4 3. + <_> + + <_> + 9 12 6 9 -1. + <_> + 9 15 6 3 3. + <_> + + <_> + 6 8 18 3 -1. + <_> + 12 8 6 3 3. + <_> + + <_> + 0 14 24 4 -1. + <_> + 8 14 8 4 3. + <_> + + <_> + 16 10 3 12 -1. + <_> + 16 16 3 6 2. + <_> + + <_> + 0 3 24 3 -1. + <_> + 0 4 24 1 3. + <_> + + <_> + 14 17 10 6 -1. + <_> + 14 19 10 2 3. + <_> + + <_> + 1 13 18 3 -1. + <_> + 7 13 6 3 3. + <_> + + <_> + 5 0 18 9 -1. + <_> + 5 3 18 3 3. + <_> + + <_> + 4 3 16 9 -1. + <_> + 4 6 16 3 3. + <_> + + <_> + 16 5 3 12 -1. + <_> + 16 11 3 6 2. + <_> + + <_> + 0 7 18 4 -1. + <_> + 6 7 6 4 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 9 8 6 10 -1. + <_> + 11 8 2 10 3. + <_> + + <_> + 9 15 6 9 -1. + <_> + 11 15 2 9 3. + <_> + + <_> + 3 1 18 21 -1. + <_> + 12 1 9 21 2. + <_> + + <_> + 6 8 12 7 -1. + <_> + 6 8 6 7 2. + <_> + + <_> + 8 5 6 9 -1. + <_> + 10 5 2 9 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 14 7 5 12 -1. + <_> + 14 11 5 4 3. + <_> + + <_> + 5 7 5 12 -1. + <_> + 5 11 5 4 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 1 6 17 -1. + <_> + 3 1 3 17 2. + <_> + + <_> + 3 1 19 9 -1. + <_> + 3 4 19 3 3. + <_> + + <_> + 3 18 12 6 -1. + <_> + 3 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 20 4 4 19 -1. + <_> + 20 4 2 19 2. + <_> + + <_> + 0 16 10 7 -1. + <_> + 5 16 5 7 2. + <_> + + <_> + 8 7 10 12 -1. + <_> + 13 7 5 6 2. + <_> + 8 13 5 6 2. + <_> + + <_> + 6 7 10 12 -1. + <_> + 6 7 5 6 2. + <_> + 11 13 5 6 2. + <_> + + <_> + 9 2 9 6 -1. + <_> + 12 2 3 6 3. + <_> + + <_> + 1 20 21 4 -1. + <_> + 8 20 7 4 3. 
+ <_> + + <_> + 9 12 9 6 -1. + <_> + 9 14 9 2 3. + <_> + + <_> + 7 2 9 6 -1. + <_> + 10 2 3 6 3. + <_> + + <_> + 13 0 4 14 -1. + <_> + 13 0 2 14 2. + <_> + + <_> + 7 0 4 14 -1. + <_> + 9 0 2 14 2. + <_> + + <_> + 14 15 9 6 -1. + <_> + 14 17 9 2 3. + <_> + + <_> + 2 8 18 5 -1. + <_> + 8 8 6 5 3. + <_> + + <_> + 18 3 6 11 -1. + <_> + 20 3 2 11 3. + <_> + + <_> + 6 5 11 14 -1. + <_> + 6 12 11 7 2. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 7 6 9 6 -1. + <_> + 7 8 9 2 3. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 0 4 6 9 -1. + <_> + 0 7 6 3 3. + <_> + + <_> + 9 4 9 4 -1. + <_> + 9 6 9 2 2. + <_> + + <_> + 0 22 19 2 -1. + <_> + 0 23 19 1 2. + <_> + + <_> + 17 14 6 9 -1. + <_> + 17 17 6 3 3. + <_> + + <_> + 1 14 6 9 -1. + <_> + 1 17 6 3 3. + <_> + + <_> + 14 11 4 9 -1. + <_> + 14 11 2 9 2. + <_> + + <_> + 6 11 4 9 -1. + <_> + 8 11 2 9 2. + <_> + + <_> + 3 9 18 7 -1. + <_> + 9 9 6 7 3. + <_> + + <_> + 9 12 6 10 -1. + <_> + 9 17 6 5 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 10 6 11 12 -1. + <_> + 10 12 11 6 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 5 6 7 3 2. + <_> + 12 9 7 3 2. + <_> + + <_> + 5 4 15 4 -1. + <_> + 5 6 15 2 2. + <_> + + <_> + 0 0 22 2 -1. + <_> + 0 1 22 1 2. + <_> + + <_> + 0 0 24 24 -1. + <_> + 8 0 8 24 3. + <_> + + <_> + 1 15 18 4 -1. + <_> + 10 15 9 4 2. + <_> + + <_> + 6 8 12 9 -1. + <_> + 6 11 12 3 3. + <_> + + <_> + 4 12 7 12 -1. + <_> + 4 16 7 4 3. + <_> + + <_> + 1 2 22 6 -1. + <_> + 12 2 11 3 2. + <_> + 1 5 11 3 2. + <_> + + <_> + 5 20 14 3 -1. + <_> + 12 20 7 3 2. + <_> + + <_> + 0 0 24 16 -1. + <_> + 12 0 12 8 2. + <_> + 0 8 12 8 2. + <_> + + <_> + 3 13 18 4 -1. + <_> + 3 13 9 2 2. + <_> + 12 15 9 2 2. + <_> + + <_> + 2 10 22 2 -1. + <_> + 2 11 22 1 2. + <_> + + <_> + 6 3 11 8 -1. + <_> + 6 7 11 4 2. + <_> + + <_> + 14 5 6 6 -1. + <_> + 14 8 6 3 2. + <_> + + <_> + 0 7 24 6 -1. + <_> + 0 9 24 2 3. + <_> + + <_> + 14 0 10 10 -1. + <_> + 19 0 5 5 2. + <_> + 14 5 5 5 2. + <_> + + <_> + 0 0 10 10 -1. + <_> + 0 0 5 5 2. + <_> + 5 5 5 5 2. + <_> + + <_> + 0 1 24 4 -1. + <_> + 12 1 12 2 2. + <_> + 0 3 12 2 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 5 15 16 6 -1. + <_> + 13 15 8 3 2. + <_> + 5 18 8 3 2. + <_> + + <_> + 3 15 16 6 -1. + <_> + 3 15 8 3 2. + <_> + 11 18 8 3 2. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 13 21 10 -1. + <_> + 0 18 21 5 2. + <_> + + <_> + 13 0 6 24 -1. + <_> + 15 0 2 24 3. + <_> + + <_> + 7 4 6 11 -1. + <_> + 9 4 2 11 3. + <_> + + <_> + 9 5 9 6 -1. + <_> + 12 5 3 6 3. + <_> + + <_> + 1 4 2 20 -1. + <_> + 1 14 2 10 2. + <_> + + <_> + 13 0 6 24 -1. + <_> + 15 0 2 24 3. + <_> + + <_> + 5 0 6 24 -1. + <_> + 7 0 2 24 3. + <_> + + <_> + 16 7 6 14 -1. + <_> + 19 7 3 7 2. + <_> + 16 14 3 7 2. + <_> + + <_> + 4 7 4 12 -1. + <_> + 6 7 2 12 2. + <_> + + <_> + 0 5 24 14 -1. + <_> + 8 5 8 14 3. + <_> + + <_> + 5 13 10 6 -1. + <_> + 5 15 10 2 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 2 7 6 14 -1. + <_> + 2 7 3 7 2. + <_> + 5 14 3 7 2. + <_> + + <_> + 15 2 9 15 -1. + <_> + 18 2 3 15 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 2 2 2 9 3. + <_> + + <_> + 12 2 10 14 -1. + <_> + 17 2 5 7 2. + <_> + 12 9 5 7 2. + <_> + + <_> + 11 6 2 18 -1. + <_> + 12 6 1 18 2. + <_> + + <_> + 9 5 15 6 -1. + <_> + 14 5 5 6 3. + <_> + + <_> + 8 6 6 10 -1. + <_> + 10 6 2 10 3. + <_> + + <_> + 12 0 6 9 -1. 
+ <_> + 14 0 2 9 3. + <_> + + <_> + 3 3 9 7 -1. + <_> + 6 3 3 7 3. + <_> + + <_> + 6 7 14 3 -1. + <_> + 6 7 7 3 2. + <_> + + <_> + 7 7 8 6 -1. + <_> + 11 7 4 6 2. + <_> + + <_> + 12 7 7 12 -1. + <_> + 12 13 7 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 16 14 6 9 -1. + <_> + 16 17 6 3 3. + <_> + + <_> + 4 0 6 13 -1. + <_> + 6 0 2 13 3. + <_> + + <_> + 2 2 21 3 -1. + <_> + 9 2 7 3 3. + <_> + + <_> + 5 4 5 12 -1. + <_> + 5 8 5 4 3. + <_> + + <_> + 10 3 4 10 -1. + <_> + 10 8 4 5 2. + <_> + + <_> + 8 4 5 8 -1. + <_> + 8 8 5 4 2. + <_> + + <_> + 6 0 11 9 -1. + <_> + 6 3 11 3 3. + <_> + + <_> + 6 6 12 5 -1. + <_> + 10 6 4 5 3. + <_> + + <_> + 0 0 24 5 -1. + <_> + 8 0 8 5 3. + <_> + + <_> + 1 10 23 6 -1. + <_> + 1 12 23 2 3. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 3 6 21 6 -1. + <_> + 3 8 21 2 3. + <_> + + <_> + 0 5 6 12 -1. + <_> + 2 5 2 12 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 8 7 8 10 -1. + <_> + 8 12 8 5 2. + <_> + + <_> + 5 7 15 12 -1. + <_> + 10 7 5 12 3. + <_> + + <_> + 0 17 10 6 -1. + <_> + 0 19 10 2 3. + <_> + + <_> + 14 18 9 6 -1. + <_> + 14 20 9 2 3. + <_> + + <_> + 9 6 6 16 -1. + <_> + 9 14 6 8 2. + <_> + + <_> + 14 18 9 6 -1. + <_> + 14 20 9 2 3. + <_> + + <_> + 1 18 9 6 -1. + <_> + 1 20 9 2 3. + <_> + + <_> + 15 9 9 6 -1. + <_> + 15 11 9 2 3. + <_> + + <_> + 0 9 9 6 -1. + <_> + 0 11 9 2 3. + <_> + + <_> + 17 3 6 9 -1. + <_> + 19 3 2 9 3. + <_> + + <_> + 2 17 18 3 -1. + <_> + 2 18 18 1 3. + <_> + + <_> + 3 15 21 6 -1. + <_> + 3 17 21 2 3. + <_> + + <_> + 9 17 6 6 -1. + <_> + 9 20 6 3 2. + <_> + + <_> + 18 3 6 9 -1. + <_> + 18 6 6 3 3. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 4 0 16 10 -1. + <_> + 12 0 8 5 2. + <_> + 4 5 8 5 2. + <_> + + <_> + 2 0 10 16 -1. + <_> + 2 0 5 8 2. + <_> + 7 8 5 8 2. + <_> + + <_> + 14 0 10 5 -1. + <_> + 14 0 5 5 2. + <_> + + <_> + 0 0 10 5 -1. + <_> + 5 0 5 5 2. + <_> + + <_> + 18 3 6 10 -1. + <_> + 18 3 3 10 2. + <_> + + <_> + 5 11 12 6 -1. + <_> + 5 11 6 3 2. + <_> + 11 14 6 3 2. + <_> + + <_> + 21 0 3 18 -1. + <_> + 22 0 1 18 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 8 8 9 7 -1. + <_> + 11 8 3 7 3. + <_> + + <_> + 7 12 8 10 -1. + <_> + 7 12 4 5 2. + <_> + 11 17 4 5 2. + <_> + + <_> + 21 0 3 18 -1. + <_> + 22 0 1 18 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 0 3 24 1 3. + <_> + + <_> + 11 7 6 9 -1. + <_> + 13 7 2 9 3. + <_> + + <_> + 7 6 6 10 -1. + <_> + 9 6 2 10 3. + <_> + + <_> + 12 1 6 12 -1. + <_> + 14 1 2 12 3. + <_> + + <_> + 6 4 12 12 -1. + <_> + 6 10 12 6 2. + <_> + + <_> + 14 3 2 21 -1. + <_> + 14 3 1 21 2. + <_> + + <_> + 6 1 12 8 -1. + <_> + 6 5 12 4 2. + <_> + + <_> + 3 0 18 8 -1. + <_> + 3 4 18 4 2. + <_> + + <_> + 3 0 18 3 -1. + <_> + 3 1 18 1 3. + <_> + + <_> + 0 13 24 4 -1. + <_> + 12 13 12 2 2. + <_> + 0 15 12 2 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 12 5 2 9 2. + <_> + + <_> + 11 1 6 9 -1. + <_> + 13 1 2 9 3. + <_> + + <_> + 6 2 6 22 -1. + <_> + 8 2 2 22 3. + <_> + + <_> + 16 10 8 14 -1. + <_> + 20 10 4 7 2. + <_> + 16 17 4 7 2. + <_> + + <_> + 3 4 16 15 -1. + <_> + 3 9 16 5 3. + <_> + + <_> + 16 10 8 14 -1. + <_> + 20 10 4 7 2. + <_> + 16 17 4 7 2. + <_> + + <_> + 0 10 8 14 -1. + <_> + 0 10 4 7 2. + <_> + 4 17 4 7 2. + <_> + + <_> + 10 14 11 6 -1. + <_> + 10 17 11 3 2. + <_> + + <_> + 0 7 24 9 -1. + <_> + 8 7 8 9 3. + <_> + + <_> + 13 1 4 16 -1. + <_> + 13 1 2 16 2. + <_> + + <_> + 7 1 4 16 -1. 
+ <_> + 9 1 2 16 2. + <_> + + <_> + 5 5 16 8 -1. + <_> + 13 5 8 4 2. + <_> + 5 9 8 4 2. + <_> + + <_> + 0 9 6 9 -1. + <_> + 0 12 6 3 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 3 12 6 9 -1. + <_> + 3 15 6 3 3. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 2 13 8 10 -1. + <_> + 2 13 4 5 2. + <_> + 6 18 4 5 2. + <_> + + <_> + 15 5 3 18 -1. + <_> + 15 11 3 6 3. + <_> + + <_> + 3 5 18 3 -1. + <_> + 3 6 18 1 3. + <_> + + <_> + 17 5 6 11 -1. + <_> + 19 5 2 11 3. + <_> + + <_> + 1 5 6 11 -1. + <_> + 3 5 2 11 3. + <_> + + <_> + 19 1 4 9 -1. + <_> + 19 1 2 9 2. + <_> + + <_> + 1 1 4 9 -1. + <_> + 3 1 2 9 2. + <_> + + <_> + 4 15 18 9 -1. + <_> + 4 15 9 9 2. + <_> + + <_> + 6 9 12 4 -1. + <_> + 6 11 12 2 2. + <_> + + <_> + 15 2 9 6 -1. + <_> + 15 4 9 2 3. + <_> + + <_> + 0 2 9 6 -1. + <_> + 0 4 9 2 3. + <_> + + <_> + 15 0 6 17 -1. + <_> + 17 0 2 17 3. + <_> + + <_> + 3 0 6 17 -1. + <_> + 5 0 2 17 3. + <_> + + <_> + 8 17 9 4 -1. + <_> + 8 19 9 2 2. + <_> + + <_> + 6 5 3 18 -1. + <_> + 6 11 3 6 3. + <_> + + <_> + 5 2 14 12 -1. + <_> + 5 8 14 6 2. + <_> + + <_> + 10 2 3 12 -1. + <_> + 10 8 3 6 2. + <_> + + <_> + 10 7 14 15 -1. + <_> + 10 12 14 5 3. + <_> + + <_> + 0 7 14 15 -1. + <_> + 0 12 14 5 3. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 0 0 9 6 -1. + <_> + 0 2 9 2 3. + <_> + + <_> + 12 6 6 14 -1. + <_> + 14 6 2 14 3. + <_> + + <_> + 9 7 6 9 -1. + <_> + 11 7 2 9 3. + <_> + + <_> + 12 6 6 15 -1. + <_> + 14 6 2 15 3. + <_> + + <_> + 6 6 6 15 -1. + <_> + 8 6 2 15 3. + <_> + + <_> + 15 3 8 9 -1. + <_> + 15 3 4 9 2. + <_> + + <_> + 0 0 9 21 -1. + <_> + 3 0 3 21 3. + <_> + + <_> + 11 9 8 12 -1. + <_> + 11 13 8 4 3. + <_> + + <_> + 6 7 10 12 -1. + <_> + 6 7 5 6 2. + <_> + 11 13 5 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 0 0 6 9 -1. + <_> + 0 3 6 3 3. + <_> + + <_> + 3 14 18 3 -1. + <_> + 3 15 18 1 3. + <_> + + <_> + 3 14 8 10 -1. + <_> + 3 14 4 5 2. + <_> + 7 19 4 5 2. + <_> + + <_> + 0 12 24 4 -1. + <_> + 12 12 12 2 2. + <_> + 0 14 12 2 2. + <_> + + <_> + 0 2 3 20 -1. + <_> + 1 2 1 20 3. + <_> + + <_> + 12 16 10 8 -1. + <_> + 17 16 5 4 2. + <_> + 12 20 5 4 2. + <_> + + <_> + 2 16 10 8 -1. + <_> + 2 16 5 4 2. + <_> + 7 20 5 4 2. + <_> + + <_> + 7 0 10 9 -1. + <_> + 7 3 10 3 3. + <_> + + <_> + 0 0 24 3 -1. + <_> + 8 0 8 3 3. + <_> + + <_> + 3 8 15 4 -1. + <_> + 3 10 15 2 2. + <_> + + <_> + 6 5 12 6 -1. + <_> + 10 5 4 6 3. + <_> + + <_> + 5 13 14 6 -1. + <_> + 5 16 14 3 2. + <_> + + <_> + 11 14 4 10 -1. + <_> + 11 19 4 5 2. + <_> + + <_> + 0 6 6 7 -1. + <_> + 3 6 3 7 2. + <_> + + <_> + 18 0 6 6 -1. + <_> + 18 0 3 6 2. + <_> + + <_> + 3 1 18 3 -1. + <_> + 3 2 18 1 3. + <_> + + <_> + 9 6 14 18 -1. + <_> + 9 12 14 6 3. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 0 20 24 3 -1. + <_> + 8 20 8 3 3. + <_> + + <_> + 13 11 6 7 -1. + <_> + 13 11 3 7 2. + <_> + + <_> + 4 12 10 6 -1. + <_> + 4 14 10 2 3. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 7 -1. + <_> + 8 11 3 7 2. + <_> + + <_> + 7 4 11 12 -1. + <_> + 7 8 11 4 3. + <_> + + <_> + 6 15 10 4 -1. + <_> + 6 17 10 2 2. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 4 0 6 9 -1. + <_> + 6 0 2 9 3. + <_> + + <_> + 11 2 4 15 -1. + <_> + 11 7 4 5 3. + <_> + + <_> + 0 0 20 3 -1. + <_> + 0 1 20 1 3. + <_> + + <_> + 13 18 10 6 -1. + <_> + 13 20 10 2 3. + <_> + + <_> + 2 7 6 11 -1. + <_> + 5 7 3 11 2. + <_> + + <_> + 10 14 10 9 -1. 
+ <_> + 10 17 10 3 3. + <_> + + <_> + 8 2 4 9 -1. + <_> + 10 2 2 9 2. + <_> + + <_> + 14 3 10 4 -1. + <_> + 14 3 5 4 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 6 6 3 2. + <_> + 12 9 6 3 2. + <_> + + <_> + 8 8 8 10 -1. + <_> + 12 8 4 5 2. + <_> + 8 13 4 5 2. + <_> + + <_> + 7 4 4 16 -1. + <_> + 7 12 4 8 2. + <_> + + <_> + 8 8 9 4 -1. + <_> + 8 10 9 2 2. + <_> + + <_> + 5 2 14 9 -1. + <_> + 5 5 14 3 3. + <_> + + <_> + 3 16 19 8 -1. + <_> + 3 20 19 4 2. + <_> + + <_> + 0 0 10 8 -1. + <_> + 5 0 5 8 2. + <_> + + <_> + 5 2 16 18 -1. + <_> + 5 2 8 18 2. + <_> + + <_> + 0 11 24 11 -1. + <_> + 8 11 8 11 3. + <_> + + <_> + 3 3 18 5 -1. + <_> + 3 3 9 5 2. + <_> + + <_> + 1 16 18 3 -1. + <_> + 1 17 18 1 3. + <_> + + <_> + 5 17 18 3 -1. + <_> + 5 18 18 1 3. + <_> + + <_> + 1 13 9 6 -1. + <_> + 1 15 9 2 3. + <_> + + <_> + 1 9 23 10 -1. + <_> + 1 14 23 5 2. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 6 8 12 3 -1. + <_> + 6 8 6 3 2. + <_> + + <_> + 6 2 3 22 -1. + <_> + 7 2 1 22 3. + <_> + + <_> + 14 17 10 6 -1. + <_> + 14 19 10 2 3. + <_> + + <_> + 1 18 10 6 -1. + <_> + 1 20 10 2 3. + <_> + + <_> + 11 3 6 12 -1. + <_> + 13 3 2 12 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 12 10 9 6 -1. + <_> + 15 10 3 6 3. + <_> + + <_> + 2 11 6 9 -1. + <_> + 5 11 3 9 2. + <_> + + <_> + 14 5 3 19 -1. + <_> + 15 5 1 19 3. + <_> + + <_> + 6 6 9 6 -1. + <_> + 6 8 9 2 3. + <_> + + <_> + 14 5 3 19 -1. + <_> + 15 5 1 19 3. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 5 21 18 3 -1. + <_> + 5 22 18 1 3. + <_> + + <_> + 1 10 18 4 -1. + <_> + 7 10 6 4 3. + <_> + + <_> + 13 4 8 10 -1. + <_> + 17 4 4 5 2. + <_> + 13 9 4 5 2. + <_> + + <_> + 7 8 9 6 -1. + <_> + 10 8 3 6 3. + <_> + + <_> + 12 9 9 8 -1. + <_> + 15 9 3 8 3. + <_> + + <_> + 0 6 5 12 -1. + <_> + 0 10 5 4 3. + <_> + + <_> + 7 6 14 6 -1. + <_> + 14 6 7 3 2. + <_> + 7 9 7 3 2. + <_> + + <_> + 7 5 3 19 -1. + <_> + 8 5 1 19 3. + <_> + + <_> + 8 4 15 20 -1. + <_> + 13 4 5 20 3. + <_> + + <_> + 1 4 15 20 -1. + <_> + 6 4 5 20 3. + <_> + + <_> + 13 10 6 6 -1. + <_> + 13 10 3 6 2. + <_> + + <_> + 5 10 6 6 -1. + <_> + 8 10 3 6 2. + <_> + + <_> + 14 2 6 14 -1. + <_> + 17 2 3 7 2. + <_> + 14 9 3 7 2. + <_> + + <_> + 4 2 6 14 -1. + <_> + 4 2 3 7 2. + <_> + 7 9 3 7 2. + <_> + + <_> + 12 4 6 7 -1. + <_> + 12 4 3 7 2. + <_> + + <_> + 9 4 6 9 -1. + <_> + 11 4 2 9 3. + <_> + + <_> + 11 4 8 10 -1. + <_> + 11 4 4 10 2. + <_> + + <_> + 5 4 8 10 -1. + <_> + 9 4 4 10 2. + <_> + + <_> + 8 18 10 6 -1. + <_> + 8 20 10 2 3. + <_> + + <_> + 1 18 21 6 -1. + <_> + 1 20 21 2 3. + <_> + + <_> + 9 2 12 6 -1. + <_> + 9 2 6 6 2. + <_> + + <_> + 3 2 12 6 -1. + <_> + 9 2 6 6 2. + <_> + + <_> + 12 5 12 6 -1. + <_> + 18 5 6 3 2. + <_> + 12 8 6 3 2. + <_> + + <_> + 8 8 6 9 -1. + <_> + 8 11 6 3 3. + <_> + + <_> + 2 7 20 6 -1. + <_> + 2 9 20 2 3. + <_> + + <_> + 0 5 12 6 -1. + <_> + 0 5 6 3 2. + <_> + 6 8 6 3 2. + <_> + + <_> + 14 14 8 10 -1. + <_> + 18 14 4 5 2. + <_> + 14 19 4 5 2. + <_> + + <_> + 2 14 8 10 -1. + <_> + 2 14 4 5 2. + <_> + 6 19 4 5 2. + <_> + + <_> + 2 11 20 13 -1. + <_> + 2 11 10 13 2. + <_> + + <_> + 6 9 12 5 -1. + <_> + 12 9 6 5 2. + <_> + + <_> + 5 6 16 6 -1. + <_> + 13 6 8 3 2. + <_> + 5 9 8 3 2. + <_> + + <_> + 1 19 9 4 -1. + <_> + 1 21 9 2 2. + <_> + + <_> + 7 5 12 5 -1. + <_> + 11 5 4 5 3. + <_> + + <_> + 3 5 14 12 -1. + <_> + 3 5 7 6 2. + <_> + 10 11 7 6 2. + <_> + + <_> + 9 4 9 6 -1. + <_> + 12 4 3 6 3. + <_> + + <_> + 2 6 19 3 -1. 
+ <_> + 2 7 19 1 3. + <_> + + <_> + 18 10 6 9 -1. + <_> + 18 13 6 3 3. + <_> + + <_> + 3 7 18 2 -1. + <_> + 3 8 18 1 2. + <_> + + <_> + 20 2 4 18 -1. + <_> + 22 2 2 9 2. + <_> + 20 11 2 9 2. + <_> + + <_> + 2 18 20 3 -1. + <_> + 2 19 20 1 3. + <_> + + <_> + 1 9 22 3 -1. + <_> + 1 10 22 1 3. + <_> + + <_> + 0 2 4 18 -1. + <_> + 0 2 2 9 2. + <_> + 2 11 2 9 2. + <_> + + <_> + 19 0 4 23 -1. + <_> + 19 0 2 23 2. + <_> + + <_> + 0 3 6 19 -1. + <_> + 3 3 3 19 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 20 2 2 9 3. + <_> + + <_> + 0 5 10 6 -1. + <_> + 0 7 10 2 3. + <_> + + <_> + 7 0 12 12 -1. + <_> + 13 0 6 6 2. + <_> + 7 6 6 6 2. + <_> + + <_> + 0 3 24 6 -1. + <_> + 0 3 12 3 2. + <_> + 12 6 12 3 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 8 9 4 15 -1. + <_> + 8 14 4 5 3. + <_> + + <_> + 4 11 17 6 -1. + <_> + 4 14 17 3 2. + <_> + + <_> + 2 5 18 8 -1. + <_> + 2 5 9 4 2. + <_> + 11 9 9 4 2. + <_> + + <_> + 7 6 14 6 -1. + <_> + 14 6 7 3 2. + <_> + 7 9 7 3 2. + <_> + + <_> + 3 6 14 6 -1. + <_> + 3 6 7 3 2. + <_> + 10 9 7 3 2. + <_> + + <_> + 16 5 3 18 -1. + <_> + 17 5 1 18 3. + <_> + + <_> + 5 5 3 18 -1. + <_> + 6 5 1 18 3. + <_> + + <_> + 10 10 14 4 -1. + <_> + 10 12 14 2 2. + <_> + + <_> + 4 10 9 4 -1. + <_> + 4 12 9 2 2. + <_> + + <_> + 2 0 18 9 -1. + <_> + 2 3 18 3 3. + <_> + + <_> + 6 3 12 8 -1. + <_> + 10 3 4 8 3. + <_> + + <_> + 1 1 8 5 -1. + <_> + 5 1 4 5 2. + <_> + + <_> + 12 7 7 8 -1. + <_> + 12 11 7 4 2. + <_> + + <_> + 0 12 22 4 -1. + <_> + 0 14 22 2 2. + <_> + + <_> + 15 6 4 15 -1. + <_> + 15 11 4 5 3. + <_> + + <_> + 5 7 7 8 -1. + <_> + 5 11 7 4 2. + <_> + + <_> + 8 18 9 4 -1. + <_> + 8 20 9 2 2. + <_> + + <_> + 1 2 22 4 -1. + <_> + 1 4 22 2 2. + <_> + + <_> + 17 3 6 17 -1. + <_> + 19 3 2 17 3. + <_> + + <_> + 8 2 8 18 -1. + <_> + 8 11 8 9 2. + <_> + + <_> + 17 0 6 12 -1. + <_> + 20 0 3 6 2. + <_> + 17 6 3 6 2. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 15 5 9 12 -1. + <_> + 15 11 9 6 2. + <_> + + <_> + 2 22 18 2 -1. + <_> + 2 23 18 1 2. + <_> + + <_> + 10 10 12 6 -1. + <_> + 16 10 6 3 2. + <_> + 10 13 6 3 2. + <_> + + <_> + 0 1 4 11 -1. + <_> + 2 1 2 11 2. + <_> + + <_> + 20 0 4 10 -1. + <_> + 20 0 2 10 2. + <_> + + <_> + 1 3 6 17 -1. + <_> + 3 3 2 17 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 13 8 9 -1. + <_> + 0 16 8 3 3. + <_> + + <_> + 16 8 6 12 -1. + <_> + 16 12 6 4 3. + <_> + + <_> + 2 8 6 12 -1. + <_> + 2 12 6 4 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 1 5 19 3 -1. + <_> + 1 6 19 1 3. + <_> + + <_> + 11 8 9 7 -1. + <_> + 14 8 3 7 3. + <_> + + <_> + 3 8 12 9 -1. + <_> + 3 11 12 3 3. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 10 0 4 12 -1. + <_> + 10 6 4 6 2. + <_> + + <_> + 3 9 18 14 -1. + <_> + 3 9 9 14 2. + <_> + + <_> + 0 0 4 9 -1. + <_> + 2 0 2 9 2. + <_> + + <_> + 12 5 4 18 -1. + <_> + 12 5 2 18 2. + <_> + + <_> + 8 5 4 18 -1. + <_> + 10 5 2 18 2. + <_> + + <_> + 10 5 6 10 -1. + <_> + 12 5 2 10 3. + <_> + + <_> + 9 4 4 11 -1. + <_> + 11 4 2 11 2. + <_> + + <_> + 4 16 18 3 -1. + <_> + 4 17 18 1 3. + <_> + + <_> + 0 16 20 3 -1. + <_> + 0 17 20 1 3. + <_> + + <_> + 9 9 6 12 -1. + <_> + 9 13 6 4 3. + <_> + + <_> + 8 13 8 8 -1. + <_> + 8 17 8 4 2. + <_> + + <_> + 13 10 3 12 -1. + <_> + 13 16 3 6 2. + <_> + + <_> + 5 9 14 14 -1. + <_> + 5 9 7 7 2. + <_> + 12 16 7 7 2. + <_> + + <_> + 0 0 24 10 -1. + <_> + 12 0 12 5 2. + <_> + 0 5 12 5 2. + <_> + + <_> + 1 11 18 2 -1. + <_> + 1 12 18 1 2. + <_> + + <_> + 19 5 5 12 -1. + <_> + 19 9 5 4 3. 
+ <_> + + <_> + 0 5 5 12 -1. + <_> + 0 9 5 4 3. + <_> + + <_> + 16 6 8 18 -1. + <_> + 20 6 4 9 2. + <_> + 16 15 4 9 2. + <_> + + <_> + 0 6 8 18 -1. + <_> + 0 6 4 9 2. + <_> + 4 15 4 9 2. + <_> + + <_> + 12 5 12 12 -1. + <_> + 18 5 6 6 2. + <_> + 12 11 6 6 2. + <_> + + <_> + 7 6 6 9 -1. + <_> + 9 6 2 9 3. + <_> + + <_> + 9 13 6 11 -1. + <_> + 11 13 2 11 3. + <_> + + <_> + 0 5 12 12 -1. + <_> + 0 5 6 6 2. + <_> + 6 11 6 6 2. + <_> + + <_> + 1 2 23 3 -1. + <_> + 1 3 23 1 3. + <_> + + <_> + 1 15 19 3 -1. + <_> + 1 16 19 1 3. + <_> + + <_> + 13 17 11 4 -1. + <_> + 13 19 11 2 2. + <_> + + <_> + 0 13 8 5 -1. + <_> + 4 13 4 5 2. + <_> + + <_> + 12 10 10 4 -1. + <_> + 12 10 5 4 2. + <_> + + <_> + 4 6 9 9 -1. + <_> + 4 9 9 3 3. + <_> + + <_> + 15 14 9 6 -1. + <_> + 15 16 9 2 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 1 14 9 2 3. + <_> + + <_> + 3 10 20 8 -1. + <_> + 13 10 10 4 2. + <_> + 3 14 10 4 2. + <_> + + <_> + 2 0 9 18 -1. + <_> + 5 0 3 18 3. + <_> + + <_> + 13 11 9 10 -1. + <_> + 16 11 3 10 3. + <_> + + <_> + 1 2 8 5 -1. + <_> + 5 2 4 5 2. + <_> + + <_> + 3 4 21 6 -1. + <_> + 10 4 7 6 3. + <_> + + <_> + 7 0 10 14 -1. + <_> + 7 0 5 7 2. + <_> + 12 7 5 7 2. + <_> + + <_> + 12 17 12 4 -1. + <_> + 12 19 12 2 2. + <_> + + <_> + 0 6 23 4 -1. + <_> + 0 8 23 2 2. + <_> + + <_> + 13 10 8 10 -1. + <_> + 17 10 4 5 2. + <_> + 13 15 4 5 2. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 15 16 9 4 -1. + <_> + 15 18 9 2 2. + <_> + + <_> + 0 16 9 4 -1. + <_> + 0 18 9 2 2. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 6 -1. + <_> + 8 11 3 6 2. + <_> + + <_> + 0 3 24 6 -1. + <_> + 12 3 12 3 2. + <_> + 0 6 12 3 2. + <_> + + <_> + 2 4 18 3 -1. + <_> + 2 5 18 1 3. + <_> + + <_> + 0 0 24 4 -1. + <_> + 12 0 12 2 2. + <_> + 0 2 12 2 2. + <_> + + <_> + 1 16 18 3 -1. + <_> + 1 17 18 1 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 8 8 6 10 -1. + <_> + 10 8 2 10 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 8 8 5 8 -1. + <_> + 8 12 5 4 2. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 6 5 6 11 -1. + <_> + 8 5 2 11 3. + <_> + + <_> + 13 6 8 9 -1. + <_> + 13 9 8 3 3. + <_> + + <_> + 1 7 21 6 -1. + <_> + 1 9 21 2 3. + <_> + + <_> + 15 5 3 12 -1. + <_> + 15 11 3 6 2. + <_> + + <_> + 6 9 11 12 -1. + <_> + 6 13 11 4 3. + <_> + + <_> + 13 8 10 8 -1. + <_> + 18 8 5 4 2. + <_> + 13 12 5 4 2. + <_> + + <_> + 5 8 12 3 -1. + <_> + 11 8 6 3 2. + <_> + + <_> + 6 11 18 4 -1. + <_> + 12 11 6 4 3. + <_> + + <_> + 0 0 22 22 -1. + <_> + 0 11 22 11 2. + <_> + + <_> + 11 2 6 8 -1. + <_> + 11 6 6 4 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 8 3 6 14 -1. + <_> + 8 3 3 7 2. + <_> + 11 10 3 7 2. + <_> + + <_> + 3 10 18 8 -1. + <_> + 9 10 6 8 3. + <_> + + <_> + 10 0 3 14 -1. + <_> + 10 7 3 7 2. + <_> + + <_> + 4 3 16 20 -1. + <_> + 4 13 16 10 2. + <_> + + <_> + 9 4 6 10 -1. + <_> + 11 4 2 10 3. + <_> + + <_> + 5 0 16 4 -1. + <_> + 5 2 16 2 2. + <_> + + <_> + 2 5 18 4 -1. + <_> + 8 5 6 4 3. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 8 4 8 5 -1. + <_> + 12 4 4 5 2. + <_> + + <_> + 12 10 10 4 -1. + <_> + 12 10 5 4 2. + <_> + + <_> + 2 10 10 4 -1. + <_> + 7 10 5 4 2. + <_> + + <_> + 7 11 12 5 -1. + <_> + 11 11 4 5 3. + <_> + + <_> + 3 10 8 10 -1. + <_> + 3 10 4 5 2. + <_> + 7 15 4 5 2. + <_> + + <_> + 11 12 9 8 -1. + <_> + 14 12 3 8 3. 
+ <_> + + <_> + 0 21 24 3 -1. + <_> + 8 21 8 3 3. + <_> + + <_> + 3 20 18 4 -1. + <_> + 9 20 6 4 3. + <_> + + <_> + 1 15 9 6 -1. + <_> + 1 17 9 2 3. + <_> + + <_> + 11 17 10 4 -1. + <_> + 11 19 10 2 2. + <_> + + <_> + 9 12 4 12 -1. + <_> + 9 18 4 6 2. + <_> + + <_> + 9 6 9 6 -1. + <_> + 12 6 3 6 3. + <_> + + <_> + 1 13 6 9 -1. + <_> + 1 16 6 3 3. + <_> + + <_> + 6 16 12 4 -1. + <_> + 6 18 12 2 2. + <_> + + <_> + 1 5 20 3 -1. + <_> + 1 6 20 1 3. + <_> + + <_> + 8 1 9 9 -1. + <_> + 8 4 9 3 3. + <_> + + <_> + 2 19 9 4 -1. + <_> + 2 21 9 2 2. + <_> + + <_> + 11 1 4 18 -1. + <_> + 11 7 4 6 3. + <_> + + <_> + 7 2 8 12 -1. + <_> + 7 2 4 6 2. + <_> + 11 8 4 6 2. + <_> + + <_> + 11 10 9 8 -1. + <_> + 14 10 3 8 3. + <_> + + <_> + 5 11 12 5 -1. + <_> + 9 11 4 5 3. + <_> + + <_> + 11 9 9 6 -1. + <_> + 14 9 3 6 3. + <_> + + <_> + 5 10 6 9 -1. + <_> + 7 10 2 9 3. + <_> + + <_> + 4 7 5 12 -1. + <_> + 4 11 5 4 3. + <_> + + <_> + 2 0 21 6 -1. + <_> + 9 0 7 6 3. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 9 0 6 15 -1. + <_> + 11 0 2 15 3. + <_> + + <_> + 2 2 18 2 -1. + <_> + 2 3 18 1 2. + <_> + + <_> + 8 17 8 6 -1. + <_> + 8 20 8 3 2. + <_> + + <_> + 3 0 18 2 -1. + <_> + 3 1 18 1 2. + <_> + + <_> + 8 0 9 6 -1. + <_> + 11 0 3 6 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 6 7 12 5 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 0 3 6 9 -1. + <_> + 2 3 2 9 3. + <_> + + <_> + 20 2 4 9 -1. + <_> + 20 2 2 9 2. + <_> + + <_> + 0 2 4 9 -1. + <_> + 2 2 2 9 2. + <_> + + <_> + 0 1 24 4 -1. + <_> + 12 1 12 2 2. + <_> + 0 3 12 2 2. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 0 15 19 3 -1. + <_> + 0 16 19 1 3. + <_> + + <_> + 1 5 22 12 -1. + <_> + 12 5 11 6 2. + <_> + 1 11 11 6 2. + <_> + + <_> + 5 13 6 6 -1. + <_> + 8 13 3 6 2. + <_> + + <_> + 4 2 20 3 -1. + <_> + 4 3 20 1 3. + <_> + + <_> + 8 14 6 10 -1. + <_> + 10 14 2 10 3. + <_> + + <_> + 6 12 16 6 -1. + <_> + 14 12 8 3 2. + <_> + 6 15 8 3 2. + <_> + + <_> + 2 13 8 9 -1. + <_> + 2 16 8 3 3. + <_> + + <_> + 11 8 6 14 -1. + <_> + 14 8 3 7 2. + <_> + 11 15 3 7 2. + <_> + + <_> + 2 12 16 6 -1. + <_> + 2 12 8 3 2. + <_> + 10 15 8 3 2. + <_> + + <_> + 5 16 16 8 -1. + <_> + 5 20 16 4 2. + <_> + + <_> + 9 1 4 12 -1. + <_> + 9 7 4 6 2. + <_> + + <_> + 8 2 8 10 -1. + <_> + 12 2 4 5 2. + <_> + 8 7 4 5 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 6 6 3 2. + <_> + 12 9 6 3 2. + <_> + + <_> + 10 7 6 9 -1. + <_> + 12 7 2 9 3. + <_> + + <_> + 0 0 8 12 -1. + <_> + 0 0 4 6 2. + <_> + 4 6 4 6 2. + <_> + + <_> + 18 8 6 9 -1. + <_> + 18 11 6 3 3. + <_> + + <_> + 2 12 6 6 -1. + <_> + 5 12 3 6 2. + <_> + + <_> + 3 21 21 3 -1. + <_> + 10 21 7 3 3. + <_> + + <_> + 2 0 16 6 -1. + <_> + 2 3 16 3 2. + <_> + + <_> + 13 6 7 6 -1. + <_> + 13 9 7 3 2. + <_> + + <_> + 6 4 4 14 -1. + <_> + 6 11 4 7 2. + <_> + + <_> + 9 7 6 9 -1. + <_> + 11 7 2 9 3. + <_> + + <_> + 7 8 6 14 -1. + <_> + 7 8 3 7 2. + <_> + 10 15 3 7 2. + <_> + + <_> + 18 8 4 16 -1. + <_> + 18 16 4 8 2. + <_> + + <_> + 9 14 6 10 -1. + <_> + 11 14 2 10 3. + <_> + + <_> + 6 11 12 5 -1. + <_> + 10 11 4 5 3. + <_> + + <_> + 0 12 23 3 -1. + <_> + 0 13 23 1 3. + <_> + + <_> + 13 0 6 12 -1. + <_> + 15 0 2 12 3. + <_> + + <_> + 0 10 12 5 -1. + <_> + 4 10 4 5 3. + <_> + + <_> + 13 2 10 4 -1. + <_> + 13 4 10 2 2. + <_> + + <_> + 5 0 6 12 -1. + <_> + 7 0 2 12 3. + <_> + + <_> + 11 6 9 6 -1. + <_> + 14 6 3 6 3. + <_> + + <_> + 4 6 9 6 -1. + <_> + 7 6 3 6 3. + <_> + + <_> + 6 11 18 13 -1. + <_> + 12 11 6 13 3. + <_> + + <_> + 0 11 18 13 -1. 
+ <_> + 6 11 6 13 3. + <_> + + <_> + 12 16 12 6 -1. + <_> + 16 16 4 6 3. + <_> + + <_> + 0 6 21 3 -1. + <_> + 0 7 21 1 3. + <_> + + <_> + 12 16 12 6 -1. + <_> + 16 16 4 6 3. + <_> + + <_> + 5 7 6 14 -1. + <_> + 5 14 6 7 2. + <_> + + <_> + 5 10 19 2 -1. + <_> + 5 11 19 1 2. + <_> + + <_> + 5 4 14 4 -1. + <_> + 5 6 14 2 2. + <_> + + <_> + 3 18 18 4 -1. + <_> + 9 18 6 4 3. + <_> + + <_> + 7 0 4 9 -1. + <_> + 9 0 2 9 2. + <_> + + <_> + 13 3 11 4 -1. + <_> + 13 5 11 2 2. + <_> + + <_> + 2 0 9 6 -1. + <_> + 5 0 3 6 3. + <_> + + <_> + 19 1 4 23 -1. + <_> + 19 1 2 23 2. + <_> + + <_> + 1 1 4 23 -1. + <_> + 3 1 2 23 2. + <_> + + <_> + 5 16 18 3 -1. + <_> + 5 17 18 1 3. + <_> + + <_> + 0 3 11 4 -1. + <_> + 0 5 11 2 2. + <_> + + <_> + 2 16 20 3 -1. + <_> + 2 17 20 1 3. + <_> + + <_> + 5 3 13 4 -1. + <_> + 5 5 13 2 2. + <_> + + <_> + 1 9 22 15 -1. + <_> + 1 9 11 15 2. + <_> + + <_> + 3 4 14 3 -1. + <_> + 10 4 7 3 2. + <_> + + <_> + 8 7 10 4 -1. + <_> + 8 7 5 4 2. + <_> + + <_> + 6 7 10 4 -1. + <_> + 11 7 5 4 2. + <_> + + <_> + 10 4 6 9 -1. + <_> + 12 4 2 9 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 4 12 3 6 3. + <_> + + <_> + 8 3 8 10 -1. + <_> + 12 3 4 5 2. + <_> + 8 8 4 5 2. + <_> + + <_> + 3 6 16 6 -1. + <_> + 3 6 8 3 2. + <_> + 11 9 8 3 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 5 9 14 3 2. + <_> + + <_> + 4 3 9 6 -1. + <_> + 4 5 9 2 3. + <_> + + <_> + 6 3 18 2 -1. + <_> + 6 4 18 1 2. + <_> + + <_> + 7 6 9 6 -1. + <_> + 10 6 3 6 3. + <_> + + <_> + 0 1 24 3 -1. + <_> + 0 2 24 1 3. + <_> + + <_> + 0 17 10 6 -1. + <_> + 0 19 10 2 3. + <_> + + <_> + 3 18 18 3 -1. + <_> + 3 19 18 1 3. + <_> + + <_> + 2 5 6 16 -1. + <_> + 2 5 3 8 2. + <_> + 5 13 3 8 2. + <_> + + <_> + 7 6 11 6 -1. + <_> + 7 8 11 2 3. + <_> + + <_> + 5 2 12 22 -1. + <_> + 5 13 12 11 2. + <_> + + <_> + 10 7 4 10 -1. + <_> + 10 12 4 5 2. + <_> + + <_> + 9 0 4 18 -1. + <_> + 9 6 4 6 3. + <_> + + <_> + 18 8 6 9 -1. + <_> + 18 11 6 3 3. + <_> + + <_> + 4 7 15 10 -1. + <_> + 9 7 5 10 3. + <_> + + <_> + 10 5 6 9 -1. + <_> + 12 5 2 9 3. + <_> + + <_> + 9 9 6 10 -1. + <_> + 11 9 2 10 3. + <_> + + <_> + 11 14 6 10 -1. + <_> + 13 14 2 10 3. + <_> + + <_> + 7 14 6 10 -1. + <_> + 9 14 2 10 3. + <_> + + <_> + 4 8 16 9 -1. + <_> + 4 11 16 3 3. + <_> + + <_> + 2 11 20 3 -1. + <_> + 2 12 20 1 3. + <_> + + <_> + 13 0 4 13 -1. + <_> + 13 0 2 13 2. + <_> + + <_> + 7 0 4 13 -1. + <_> + 9 0 2 13 2. + <_> + + <_> + 3 1 18 7 -1. + <_> + 9 1 6 7 3. + <_> + + <_> + 1 11 6 9 -1. + <_> + 1 14 6 3 3. + <_> + + <_> + 8 18 9 6 -1. + <_> + 8 20 9 2 3. + <_> + + <_> + 3 9 15 6 -1. + <_> + 3 11 15 2 3. + <_> + + <_> + 5 10 19 2 -1. + <_> + 5 11 19 1 2. + <_> + + <_> + 8 6 7 16 -1. + <_> + 8 14 7 8 2. + <_> + + <_> + 9 14 9 6 -1. + <_> + 9 16 9 2 3. + <_> + + <_> + 0 7 8 12 -1. + <_> + 0 11 8 4 3. + <_> + + <_> + 6 4 18 3 -1. + <_> + 6 5 18 1 3. + <_> + + <_> + 0 16 12 6 -1. + <_> + 4 16 4 6 3. + <_> + + <_> + 13 13 9 4 -1. + <_> + 13 15 9 2 2. + <_> + + <_> + 5 8 14 14 -1. + <_> + 5 8 7 7 2. + <_> + 12 15 7 7 2. + <_> + + <_> + 1 16 22 6 -1. + <_> + 12 16 11 3 2. + <_> + 1 19 11 3 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 9 5 10 10 -1. + <_> + 14 5 5 5 2. + <_> + 9 10 5 5 2. + <_> + + <_> + 5 5 10 10 -1. + <_> + 5 5 5 5 2. + <_> + 10 10 5 5 2. + <_> + + <_> + 4 6 16 6 -1. + <_> + 12 6 8 3 2. + <_> + 4 9 8 3 2. + <_> + + <_> + 0 7 6 9 -1. + <_> + 0 10 6 3 3. + <_> + + <_> + 16 10 8 14 -1. + <_> + 20 10 4 7 2. + <_> + 16 17 4 7 2. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 18 6 6 2. + <_> + + <_> + 8 10 8 12 -1. + <_> + 12 10 4 6 2. + <_> + 8 16 4 6 2. 
+ <_> + + <_> + 8 0 4 9 -1. + <_> + 10 0 2 9 2. + <_> + + <_> + 10 4 8 16 -1. + <_> + 14 4 4 8 2. + <_> + 10 12 4 8 2. + <_> + + <_> + 7 10 10 6 -1. + <_> + 7 12 10 2 3. + <_> + + <_> + 5 6 14 14 -1. + <_> + 12 6 7 7 2. + <_> + 5 13 7 7 2. + <_> + + <_> + 2 11 20 2 -1. + <_> + 2 12 20 1 2. + <_> + + <_> + 18 8 4 16 -1. + <_> + 18 16 4 8 2. + <_> + + <_> + 1 11 12 10 -1. + <_> + 1 11 6 5 2. + <_> + 7 16 6 5 2. + <_> + + <_> + 6 9 12 4 -1. + <_> + 6 11 12 2 2. + <_> + + <_> + 9 12 6 7 -1. + <_> + 12 12 3 7 2. + <_> + + <_> + 10 4 8 16 -1. + <_> + 14 4 4 8 2. + <_> + 10 12 4 8 2. + <_> + + <_> + 6 4 8 16 -1. + <_> + 6 4 4 8 2. + <_> + 10 12 4 8 2. + <_> + + <_> + 8 9 9 6 -1. + <_> + 11 9 3 6 3. + <_> + + <_> + 1 5 16 12 -1. + <_> + 1 5 8 6 2. + <_> + 9 11 8 6 2. + <_> + + <_> + 9 9 6 8 -1. + <_> + 9 9 3 8 2. + <_> + + <_> + 6 0 3 18 -1. + <_> + 7 0 1 18 3. + <_> + + <_> + 17 9 5 14 -1. + <_> + 17 16 5 7 2. + <_> + + <_> + 2 9 5 14 -1. + <_> + 2 16 5 7 2. + <_> + + <_> + 7 4 10 6 -1. + <_> + 7 7 10 3 2. + <_> + + <_> + 1 3 23 18 -1. + <_> + 1 9 23 6 3. + <_> + + <_> + 1 1 21 3 -1. + <_> + 8 1 7 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 3 18 12 6 -1. + <_> + 3 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 16 8 8 16 -1. + <_> + 20 8 4 8 2. + <_> + 16 16 4 8 2. + <_> + + <_> + 0 19 24 4 -1. + <_> + 8 19 8 4 3. + <_> + + <_> + 16 8 8 16 -1. + <_> + 20 8 4 8 2. + <_> + 16 16 4 8 2. + <_> + + <_> + 0 8 8 16 -1. + <_> + 0 8 4 8 2. + <_> + 4 16 4 8 2. + <_> + + <_> + 8 12 8 10 -1. + <_> + 8 17 8 5 2. + <_> + + <_> + 5 7 5 8 -1. + <_> + 5 11 5 4 2. + <_> + + <_> + 4 1 19 2 -1. + <_> + 4 2 19 1 2. + <_> + + <_> + 0 12 24 9 -1. + <_> + 8 12 8 9 3. + <_> + + <_> + 6 0 13 8 -1. + <_> + 6 4 13 4 2. + <_> + + <_> + 0 0 24 3 -1. + <_> + 0 1 24 1 3. + <_> + + <_> + 20 3 4 11 -1. + <_> + 20 3 2 11 2. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 6 11 12 8 -1. + <_> + 12 11 6 4 2. + <_> + 6 15 6 4 2. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 20 3 4 9 -1. + <_> + 20 3 2 9 2. + <_> + + <_> + 0 3 4 9 -1. + <_> + 2 3 2 9 2. + <_> + + <_> + 15 0 9 19 -1. + <_> + 18 0 3 19 3. + <_> + + <_> + 0 0 9 19 -1. + <_> + 3 0 3 19 3. + <_> + + <_> + 13 11 6 8 -1. + <_> + 13 11 3 8 2. + <_> + + <_> + 5 11 6 8 -1. + <_> + 8 11 3 8 2. + <_> + + <_> + 5 11 19 3 -1. + <_> + 5 12 19 1 3. + <_> + + <_> + 3 20 18 4 -1. + <_> + 9 20 6 4 3. + <_> + + <_> + 6 6 16 6 -1. + <_> + 6 8 16 2 3. + <_> + + <_> + 6 0 9 6 -1. + <_> + 9 0 3 6 3. + <_> + + <_> + 10 3 4 14 -1. + <_> + 10 10 4 7 2. + <_> + + <_> + 1 5 15 12 -1. + <_> + 1 11 15 6 2. + <_> + + <_> + 11 12 8 5 -1. + <_> + 11 12 4 5 2. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 5 5 12 8 -1. + <_> + 5 5 6 4 2. + <_> + 11 9 6 4 2. + <_> + + <_> + 13 12 11 6 -1. + <_> + 13 14 11 2 3. + <_> + + <_> + 0 13 21 3 -1. + <_> + 0 14 21 1 3. + <_> + + <_> + 8 1 8 12 -1. + <_> + 12 1 4 6 2. + <_> + 8 7 4 6 2. + <_> + + <_> + 1 0 6 12 -1. + <_> + 1 0 3 6 2. + <_> + 4 6 3 6 2. + <_> + + <_> + 2 2 21 2 -1. + <_> + 2 3 21 1 2. + <_> + + <_> + 2 2 19 3 -1. + <_> + 2 3 19 1 3. + <_> + + <_> + 17 10 6 14 -1. + <_> + 20 10 3 7 2. + <_> + 17 17 3 7 2. + <_> + + <_> + 1 10 6 14 -1. + <_> + 1 10 3 7 2. + <_> + 4 17 3 7 2. + <_> + + <_> + 7 6 14 14 -1. + <_> + 14 6 7 7 2. + <_> + 7 13 7 7 2. + <_> + + <_> + 0 12 9 6 -1. + <_> + 0 14 9 2 3. 
+ <_> + + <_> + 15 14 8 9 -1. + <_> + 15 17 8 3 3. + <_> + + <_> + 1 1 22 4 -1. + <_> + 1 1 11 2 2. + <_> + 12 3 11 2 2. + <_> + + <_> + 9 11 9 6 -1. + <_> + 9 13 9 2 3. + <_> + + <_> + 0 15 18 3 -1. + <_> + 0 16 18 1 3. + <_> + + <_> + 16 14 7 9 -1. + <_> + 16 17 7 3 3. + <_> + + <_> + 4 3 16 4 -1. + <_> + 12 3 8 4 2. + <_> + + <_> + 7 6 12 5 -1. + <_> + 7 6 6 5 2. + <_> + + <_> + 9 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 12 1 4 10 -1. + <_> + 12 1 2 10 2. + <_> + + <_> + 8 1 4 10 -1. + <_> + 10 1 2 10 2. + <_> + + <_> + 15 15 6 9 -1. + <_> + 15 18 6 3 3. + <_> + + <_> + 3 15 6 9 -1. + <_> + 3 18 6 3 3. + <_> + + <_> + 15 1 3 19 -1. + <_> + 16 1 1 19 3. + <_> + + <_> + 1 3 6 9 -1. + <_> + 3 3 2 9 3. + <_> + + <_> + 15 0 3 19 -1. + <_> + 16 0 1 19 3. + <_> + + <_> + 6 3 12 4 -1. + <_> + 12 3 6 4 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 10 5 2 9 2. + <_> + + <_> + 6 0 3 19 -1. + <_> + 7 0 1 19 3. + <_> + + <_> + 11 1 3 12 -1. + <_> + 11 7 3 6 2. + <_> + + <_> + 6 7 10 5 -1. + <_> + 11 7 5 5 2. + <_> + + <_> + 11 3 3 18 -1. + <_> + 12 3 1 18 3. + <_> + + <_> + 9 3 6 12 -1. + <_> + 11 3 2 12 3. + <_> + + <_> + 3 7 19 3 -1. + <_> + 3 8 19 1 3. + <_> + + <_> + 2 7 18 3 -1. + <_> + 2 8 18 1 3. + <_> + + <_> + 3 13 18 4 -1. + <_> + 12 13 9 2 2. + <_> + 3 15 9 2 2. + <_> + + <_> + 3 5 6 9 -1. + <_> + 5 5 2 9 3. + <_> + + <_> + 4 1 20 4 -1. + <_> + 14 1 10 2 2. + <_> + 4 3 10 2 2. + <_> + + <_> + 0 1 20 4 -1. + <_> + 0 1 10 2 2. + <_> + 10 3 10 2 2. + <_> + + <_> + 10 15 6 6 -1. + <_> + 10 15 3 6 2. + <_> + + <_> + 0 2 24 8 -1. + <_> + 8 2 8 8 3. + <_> + + <_> + 5 5 18 3 -1. + <_> + 5 6 18 1 3. + <_> + + <_> + 8 15 6 6 -1. + <_> + 11 15 3 6 2. + <_> + + <_> + 11 12 8 5 -1. + <_> + 11 12 4 5 2. + <_> + + <_> + 5 12 8 5 -1. + <_> + 9 12 4 5 2. + <_> + + <_> + 5 0 14 6 -1. + <_> + 5 2 14 2 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 10 7 5 12 -1. + <_> + 10 11 5 4 3. + <_> + + <_> + 7 9 8 14 -1. + <_> + 7 9 4 7 2. + <_> + 11 16 4 7 2. + <_> + + <_> + 1 5 22 6 -1. + <_> + 12 5 11 3 2. + <_> + 1 8 11 3 2. + <_> + + <_> + 0 5 6 6 -1. + <_> + 0 8 6 3 2. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 2 18 19 3 -1. + <_> + 2 19 19 1 3. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 0 0 24 3 -1. + <_> + 0 1 24 1 3. + <_> + + <_> + 5 0 14 4 -1. + <_> + 5 2 14 2 2. + <_> + + <_> + 6 14 9 6 -1. + <_> + 6 16 9 2 3. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. + <_> + + <_> + 5 20 13 4 -1. + <_> + 5 22 13 2 2. + <_> + + <_> + 9 9 6 12 -1. + <_> + 9 13 6 4 3. + <_> + + <_> + 1 10 21 3 -1. + <_> + 8 10 7 3 3. + <_> + + <_> + 8 8 9 6 -1. + <_> + 11 8 3 6 3. + <_> + + <_> + 3 10 9 7 -1. + <_> + 6 10 3 7 3. + <_> + + <_> + 12 10 10 8 -1. + <_> + 17 10 5 4 2. + <_> + 12 14 5 4 2. + <_> + + <_> + 0 15 24 3 -1. + <_> + 8 15 8 3 3. + <_> + + <_> + 8 5 9 6 -1. + <_> + 8 7 9 2 3. + <_> + + <_> + 4 13 6 9 -1. + <_> + 4 16 6 3 3. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 9 12 6 6 -1. + <_> + 9 15 6 3 2. + <_> + + <_> + 9 9 14 10 -1. + <_> + 16 9 7 5 2. + <_> + 9 14 7 5 2. + <_> + + <_> + 1 9 14 10 -1. + <_> + 1 9 7 5 2. + <_> + 8 14 7 5 2. + <_> + + <_> + 8 7 9 17 -1. + <_> + 11 7 3 17 3. + <_> + + <_> + 3 4 6 20 -1. + <_> + 3 4 3 10 2. + <_> + 6 14 3 10 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 10 7 4 9 -1. + <_> + 12 7 2 9 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. 
+ <_> + + <_> + 3 8 6 16 -1. + <_> + 3 8 3 8 2. + <_> + 6 16 3 8 2. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 3 17 9 4 -1. + <_> + 3 19 9 2 2. + <_> + + <_> + 10 1 9 6 -1. + <_> + 13 1 3 6 3. + <_> + + <_> + 5 7 4 10 -1. + <_> + 5 12 4 5 2. + <_> + + <_> + 7 5 12 6 -1. + <_> + 11 5 4 6 3. + <_> + + <_> + 6 4 9 8 -1. + <_> + 9 4 3 8 3. + <_> + + <_> + 12 16 10 8 -1. + <_> + 17 16 5 4 2. + <_> + 12 20 5 4 2. + <_> + + <_> + 2 16 10 8 -1. + <_> + 2 16 5 4 2. + <_> + 7 20 5 4 2. + <_> + + <_> + 0 0 24 4 -1. + <_> + 12 0 12 2 2. + <_> + 0 2 12 2 2. + <_> + + <_> + 0 6 9 6 -1. + <_> + 0 8 9 2 3. + <_> + + <_> + 0 4 24 6 -1. + <_> + 12 4 12 3 2. + <_> + 0 7 12 3 2. + <_> + + <_> + 5 0 11 4 -1. + <_> + 5 2 11 2 2. + <_> + + <_> + 1 1 22 4 -1. + <_> + 12 1 11 2 2. + <_> + 1 3 11 2 2. + <_> + + <_> + 9 6 6 18 -1. + <_> + 9 15 6 9 2. + <_> + + <_> + 2 9 20 4 -1. + <_> + 2 11 20 2 2. + <_> + + <_> + 5 2 14 14 -1. + <_> + 5 9 14 7 2. + <_> + + <_> + 4 2 16 6 -1. + <_> + 4 5 16 3 2. + <_> + + <_> + 2 3 19 3 -1. + <_> + 2 4 19 1 3. + <_> + + <_> + 7 1 10 4 -1. + <_> + 7 3 10 2 2. + <_> + + <_> + 0 9 4 15 -1. + <_> + 0 14 4 5 3. + <_> + + <_> + 2 10 21 3 -1. + <_> + 2 11 21 1 3. + <_> + + <_> + 3 0 6 6 -1. + <_> + 6 0 3 6 2. + <_> + + <_> + 6 4 14 9 -1. + <_> + 6 7 14 3 3. + <_> + + <_> + 9 1 6 9 -1. + <_> + 11 1 2 9 3. + <_> + + <_> + 15 8 9 9 -1. + <_> + 15 11 9 3 3. + <_> + + <_> + 8 0 4 21 -1. + <_> + 8 7 4 7 3. + <_> + + <_> + 3 22 19 2 -1. + <_> + 3 23 19 1 2. + <_> + + <_> + 2 15 20 3 -1. + <_> + 2 16 20 1 3. + <_> + + <_> + 19 0 4 13 -1. + <_> + 19 0 2 13 2. + <_> + + <_> + 1 7 8 8 -1. + <_> + 1 11 8 4 2. + <_> + + <_> + 14 14 6 9 -1. + <_> + 14 17 6 3 3. + <_> + + <_> + 4 14 6 9 -1. + <_> + 4 17 6 3 3. + <_> + + <_> + 14 5 4 10 -1. + <_> + 14 5 2 10 2. + <_> + + <_> + 6 5 4 10 -1. + <_> + 8 5 2 10 2. + <_> + + <_> + 14 5 6 6 -1. + <_> + 14 8 6 3 2. + <_> + + <_> + 4 5 6 6 -1. + <_> + 4 8 6 3 2. + <_> + + <_> + 0 2 24 21 -1. + <_> + 8 2 8 21 3. + <_> + + <_> + 1 2 6 13 -1. + <_> + 3 2 2 13 3. + <_> + + <_> + 20 0 4 21 -1. + <_> + 20 0 2 21 2. + <_> + + <_> + 0 4 4 20 -1. + <_> + 2 4 2 20 2. + <_> + + <_> + 8 16 9 6 -1. + <_> + 8 18 9 2 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 16 12 7 9 -1. + <_> + 16 15 7 3 3. + <_> + + <_> + 5 21 14 3 -1. + <_> + 12 21 7 3 2. + <_> + + <_> + 11 5 6 9 -1. + <_> + 11 5 3 9 2. + <_> + + <_> + 10 5 4 10 -1. + <_> + 12 5 2 10 2. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 7 5 6 9 -1. + <_> + 10 5 3 9 2. + <_> + + <_> + 14 14 10 4 -1. + <_> + 14 16 10 2 2. + <_> + + <_> + 5 5 14 14 -1. + <_> + 5 5 7 7 2. + <_> + 12 12 7 7 2. + <_> + + <_> + 12 8 12 6 -1. + <_> + 18 8 6 3 2. + <_> + 12 11 6 3 2. + <_> + + <_> + 6 6 12 12 -1. + <_> + 6 6 6 6 2. + <_> + 12 12 6 6 2. + <_> + + <_> + 11 13 6 10 -1. + <_> + 13 13 2 10 3. + <_> + + <_> + 1 10 20 8 -1. + <_> + 1 10 10 4 2. + <_> + 11 14 10 4 2. + <_> + + <_> + 15 13 9 6 -1. + <_> + 15 15 9 2 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 9 3 6 3 3. + <_> + + <_> + 10 1 5 14 -1. + <_> + 10 8 5 7 2. + <_> + + <_> + 3 4 16 6 -1. + <_> + 3 6 16 2 3. + <_> + + <_> + 16 3 8 9 -1. + <_> + 16 6 8 3 3. + <_> + + <_> + 7 13 6 10 -1. + <_> + 9 13 2 10 3. + <_> + + <_> + 15 13 9 6 -1. + <_> + 15 15 9 2 3. + <_> + + <_> + 0 13 9 6 -1. + <_> + 0 15 9 2 3. + <_> + + <_> + 13 16 9 6 -1. + <_> + 13 18 9 2 3. + <_> + + <_> + 2 16 9 6 -1. + <_> + 2 18 9 2 3. + <_> + + <_> + 5 16 18 3 -1. + <_> + 5 17 18 1 3. + <_> + + <_> + 1 16 18 3 -1. + <_> + 1 17 18 1 3. 
+ <_> + + <_> + 5 0 18 3 -1. + <_> + 5 1 18 1 3. + <_> + + <_> + 1 1 19 2 -1. + <_> + 1 2 19 1 2. + <_> + + <_> + 14 2 6 11 -1. + <_> + 16 2 2 11 3. + <_> + + <_> + 4 15 15 6 -1. + <_> + 9 15 5 6 3. + <_> + + <_> + 14 2 6 11 -1. + <_> + 16 2 2 11 3. + <_> + + <_> + 4 2 6 11 -1. + <_> + 6 2 2 11 3. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 1 2 22 4 -1. + <_> + 1 2 11 2 2. + <_> + 12 4 11 2 2. + <_> + + <_> + 2 0 21 12 -1. + <_> + 9 0 7 12 3. + <_> + + <_> + 0 12 18 3 -1. + <_> + 0 13 18 1 3. + <_> + + <_> + 12 2 6 9 -1. + <_> + 14 2 2 9 3. + <_> + + <_> + 3 10 18 3 -1. + <_> + 3 11 18 1 3. + <_> + + <_> + 16 3 8 9 -1. + <_> + 16 6 8 3 3. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 9 11 6 9 -1. + <_> + 11 11 2 9 3. + <_> + + <_> + 9 8 6 9 -1. + <_> + 11 8 2 9 3. + <_> + + <_> + 15 0 2 18 -1. + <_> + 15 0 1 18 2. + <_> + + <_> + 7 0 2 18 -1. + <_> + 8 0 1 18 2. + <_> + + <_> + 17 3 7 9 -1. + <_> + 17 6 7 3 3. + <_> + + <_> + 3 18 9 6 -1. + <_> + 3 20 9 2 3. + <_> + + <_> + 3 18 21 3 -1. + <_> + 3 19 21 1 3. + <_> + + <_> + 0 3 7 9 -1. + <_> + 0 6 7 3 3. + <_> + + <_> + 2 7 22 3 -1. + <_> + 2 8 22 1 3. + <_> + + <_> + 0 3 24 16 -1. + <_> + 0 3 12 8 2. + <_> + 12 11 12 8 2. + <_> + + <_> + 13 17 9 4 -1. + <_> + 13 19 9 2 2. + <_> + + <_> + 5 5 12 8 -1. + <_> + 5 5 6 4 2. + <_> + 11 9 6 4 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 12 6 7 3 2. + <_> + 5 9 7 3 2. + <_> + + <_> + 5 16 14 6 -1. + <_> + 5 16 7 3 2. + <_> + 12 19 7 3 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 3 4 20 10 -1. + <_> + 13 4 10 5 2. + <_> + 3 9 10 5 2. + <_> + + <_> + 2 13 9 8 -1. + <_> + 5 13 3 8 3. + <_> + + <_> + 2 1 21 15 -1. + <_> + 9 1 7 15 3. + <_> + + <_> + 5 12 14 8 -1. + <_> + 12 12 7 8 2. + <_> + + <_> + 6 7 12 4 -1. + <_> + 6 7 6 4 2. + <_> + + <_> + 6 5 9 6 -1. + <_> + 9 5 3 6 3. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 6 -1. + <_> + 8 11 3 6 2. + <_> + + <_> + 6 4 18 2 -1. + <_> + 6 5 18 1 2. + <_> + + <_> + 0 2 6 11 -1. + <_> + 2 2 2 11 3. + <_> + + <_> + 18 0 6 15 -1. + <_> + 20 0 2 15 3. + <_> + + <_> + 0 0 6 13 -1. + <_> + 2 0 2 13 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 3 13 18 4 -1. + <_> + 12 13 9 4 2. + <_> + + <_> + 9 7 10 4 -1. + <_> + 9 7 5 4 2. + <_> + + <_> + 5 8 12 3 -1. + <_> + 11 8 6 3 2. + <_> + + <_> + 4 14 19 3 -1. + <_> + 4 15 19 1 3. + <_> + + <_> + 10 0 4 20 -1. + <_> + 10 10 4 10 2. + <_> + + <_> + 8 15 9 6 -1. + <_> + 8 17 9 2 3. + <_> + + <_> + 2 9 15 4 -1. + <_> + 7 9 5 4 3. + <_> + + <_> + 8 4 12 7 -1. + <_> + 12 4 4 7 3. + <_> + + <_> + 0 10 6 9 -1. + <_> + 0 13 6 3 3. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 0 18 16 6 -1. + <_> + 0 18 8 3 2. + <_> + 8 21 8 3 2. + <_> + + <_> + 9 18 14 6 -1. + <_> + 16 18 7 3 2. + <_> + 9 21 7 3 2. + <_> + + <_> + 1 20 20 4 -1. + <_> + 1 20 10 2 2. + <_> + 11 22 10 2 2. + <_> + + <_> + 2 8 20 6 -1. + <_> + 12 8 10 3 2. + <_> + 2 11 10 3 2. + <_> + + <_> + 7 8 6 9 -1. + <_> + 9 8 2 9 3. + <_> + + <_> + 8 5 12 8 -1. + <_> + 12 5 4 8 3. + <_> + + <_> + 4 5 12 8 -1. + <_> + 8 5 4 8 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 2 0 6 16 -1. + <_> + 4 0 2 16 3. + <_> + + <_> + 15 4 6 12 -1. + <_> + 15 8 6 4 3. + <_> + + <_> + 3 4 6 12 -1. + <_> + 3 8 6 4 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 4 0 15 22 -1. 
+ <_> + 4 11 15 11 2. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 0 12 9 6 -1. + <_> + 0 14 9 2 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 10 0 8 10 -1. + <_> + 14 0 4 5 2. + <_> + 10 5 4 5 2. + <_> + + <_> + 1 0 4 16 -1. + <_> + 3 0 2 16 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 10 12 4 10 -1. + <_> + 10 17 4 5 2. + <_> + + <_> + 8 4 10 6 -1. + <_> + 8 6 10 2 3. + <_> + + <_> + 3 22 18 2 -1. + <_> + 12 22 9 2 2. + <_> + + <_> + 7 7 11 6 -1. + <_> + 7 9 11 2 3. + <_> + + <_> + 0 0 12 10 -1. + <_> + 0 0 6 5 2. + <_> + 6 5 6 5 2. + <_> + + <_> + 10 1 12 6 -1. + <_> + 16 1 6 3 2. + <_> + 10 4 6 3 2. + <_> + + <_> + 7 16 9 4 -1. + <_> + 7 18 9 2 2. + <_> + + <_> + 5 7 15 16 -1. + <_> + 10 7 5 16 3. + <_> + + <_> + 5 10 12 13 -1. + <_> + 11 10 6 13 2. + <_> + + <_> + 6 2 12 6 -1. + <_> + 12 2 6 3 2. + <_> + 6 5 6 3 2. + <_> + + <_> + 3 9 12 9 -1. + <_> + 3 12 12 3 3. + <_> + + <_> + 16 2 8 6 -1. + <_> + 16 5 8 3 2. + <_> + + <_> + 0 2 8 6 -1. + <_> + 0 5 8 3 2. + <_> + + <_> + 0 3 24 11 -1. + <_> + 0 3 12 11 2. + <_> + + <_> + 0 13 8 10 -1. + <_> + 0 13 4 5 2. + <_> + 4 18 4 5 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 10 2 4 21 -1. + <_> + 10 9 4 7 3. + <_> + + <_> + 4 4 15 9 -1. + <_> + 4 7 15 3 3. + <_> + + <_> + 0 1 24 6 -1. + <_> + 8 1 8 6 3. + <_> + + <_> + 9 6 5 16 -1. + <_> + 9 14 5 8 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 6 5 3 12 -1. + <_> + 6 11 3 6 2. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 5 6 9 8 -1. + <_> + 8 6 3 8 3. + <_> + + <_> + 4 3 20 2 -1. + <_> + 4 4 20 1 2. + <_> + + <_> + 2 10 18 3 -1. + <_> + 8 10 6 3 3. + <_> + + <_> + 7 15 10 6 -1. + <_> + 7 17 10 2 3. + <_> + + <_> + 1 4 4 18 -1. + <_> + 1 4 2 9 2. + <_> + 3 13 2 9 2. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 6 7 9 6 -1. + <_> + 9 7 3 6 3. + <_> + + <_> + 3 0 18 2 -1. + <_> + 3 1 18 1 2. + <_> + + <_> + 0 10 20 4 -1. + <_> + 0 10 10 2 2. + <_> + 10 12 10 2 2. + <_> + + <_> + 10 2 4 12 -1. + <_> + 10 8 4 6 2. + <_> + + <_> + 6 5 6 12 -1. + <_> + 6 5 3 6 2. + <_> + 9 11 3 6 2. + <_> + + <_> + 6 0 18 22 -1. + <_> + 15 0 9 11 2. + <_> + 6 11 9 11 2. + <_> + + <_> + 0 0 18 22 -1. + <_> + 0 0 9 11 2. + <_> + 9 11 9 11 2. + <_> + + <_> + 18 2 6 11 -1. + <_> + 20 2 2 11 3. + <_> + + <_> + 0 2 6 11 -1. + <_> + 2 2 2 11 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 0 0 20 3 -1. + <_> + 0 1 20 1 3. + <_> + + <_> + 2 2 20 2 -1. + <_> + 2 3 20 1 2. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 18 7 6 9 -1. + <_> + 18 10 6 3 3. + <_> + + <_> + 0 0 22 9 -1. + <_> + 0 3 22 3 3. + <_> + + <_> + 17 3 6 9 -1. + <_> + 17 6 6 3 3. + <_> + + <_> + 0 7 6 9 -1. + <_> + 0 10 6 3 3. + <_> + + <_> + 0 6 24 6 -1. + <_> + 0 8 24 2 3. + <_> + + <_> + 0 2 6 10 -1. + <_> + 2 2 2 10 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 15 0 6 9 -1. + <_> + 17 0 2 9 3. + <_> + + <_> + 3 0 6 9 -1. + <_> + 5 0 2 9 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 15 14 9 6 -1. + <_> + 15 16 9 2 3. + <_> + + <_> + 0 15 23 6 -1. + <_> + 0 17 23 2 3. + <_> + + <_> + 5 15 18 3 -1. + <_> + 5 16 18 1 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. 
+ <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 3 7 15 6 -1. + <_> + 8 7 5 6 3. + <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 5 0 6 12 -1. + <_> + 8 0 3 12 2. + <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 8 5 6 9 -1. + <_> + 10 5 2 9 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 5 7 12 4 -1. + <_> + 11 7 6 4 2. + <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 7 8 8 10 -1. + <_> + 7 8 4 5 2. + <_> + 11 13 4 5 2. + <_> + + <_> + 11 10 6 14 -1. + <_> + 14 10 3 7 2. + <_> + 11 17 3 7 2. + <_> + + <_> + 9 5 6 19 -1. + <_> + 12 5 3 19 2. + <_> + + <_> + 6 12 12 6 -1. + <_> + 12 12 6 3 2. + <_> + 6 15 6 3 2. + <_> + + <_> + 1 9 18 6 -1. + <_> + 1 9 9 3 2. + <_> + 10 12 9 3 2. + <_> + + <_> + 16 14 8 10 -1. + <_> + 20 14 4 5 2. + <_> + 16 19 4 5 2. + <_> + + <_> + 0 9 22 8 -1. + <_> + 0 9 11 4 2. + <_> + 11 13 11 4 2. + <_> + + <_> + 8 18 12 6 -1. + <_> + 14 18 6 3 2. + <_> + 8 21 6 3 2. + <_> + + <_> + 0 6 20 18 -1. + <_> + 0 6 10 9 2. + <_> + 10 15 10 9 2. + <_> + + <_> + 3 6 20 12 -1. + <_> + 13 6 10 6 2. + <_> + 3 12 10 6 2. + <_> + + <_> + 0 16 10 8 -1. + <_> + 0 16 5 4 2. + <_> + 5 20 5 4 2. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 11 19 3 -1. + <_> + 0 12 19 1 3. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 1 7 22 4 -1. + <_> + 1 7 11 2 2. + <_> + 12 9 11 2 2. + <_> + + <_> + 13 6 7 12 -1. + <_> + 13 10 7 4 3. + <_> + + <_> + 4 7 11 9 -1. + <_> + 4 10 11 3 3. + <_> + + <_> + 12 10 10 8 -1. + <_> + 17 10 5 4 2. + <_> + 12 14 5 4 2. + <_> + + <_> + 2 12 9 7 -1. + <_> + 5 12 3 7 3. + <_> + + <_> + 16 14 6 9 -1. + <_> + 16 17 6 3 3. + <_> + + <_> + 3 12 6 12 -1. + <_> + 3 16 6 4 3. + <_> + + <_> + 14 13 6 6 -1. + <_> + 14 16 6 3 2. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 9 1 6 23 -1. + <_> + 11 1 2 23 3. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 4 17 18 3 -1. + <_> + 4 18 18 1 3. + <_> + + <_> + 5 2 13 14 -1. + <_> + 5 9 13 7 2. + <_> + + <_> + 15 0 8 12 -1. + <_> + 19 0 4 6 2. + <_> + 15 6 4 6 2. + <_> + + <_> + 0 0 8 12 -1. + <_> + 0 0 4 6 2. + <_> + 4 6 4 6 2. + <_> + + <_> + 8 2 8 7 -1. + <_> + 8 2 4 7 2. + <_> + + <_> + 1 1 6 9 -1. + <_> + 3 1 2 9 3. + <_> + + <_> + 14 8 6 12 -1. + <_> + 17 8 3 6 2. + <_> + 14 14 3 6 2. + <_> + + <_> + 4 8 6 12 -1. + <_> + 4 8 3 6 2. + <_> + 7 14 3 6 2. + <_> + + <_> + 16 5 5 15 -1. + <_> + 16 10 5 5 3. + <_> + + <_> + 3 5 5 15 -1. + <_> + 3 10 5 5 3. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 1 7 6 15 -1. + <_> + 1 12 6 5 3. + <_> + + <_> + 11 15 12 8 -1. + <_> + 17 15 6 4 2. + <_> + 11 19 6 4 2. + <_> + + <_> + 0 2 24 4 -1. + <_> + 0 2 12 2 2. + <_> + 12 4 12 2 2. + <_> + + <_> + 15 1 2 19 -1. + <_> + 15 1 1 19 2. + <_> + + <_> + 7 1 2 19 -1. + <_> + 8 1 1 19 2. + <_> + + <_> + 22 1 2 20 -1. + <_> + 22 1 1 20 2. + <_> + + <_> + 0 1 2 20 -1. + <_> + 1 1 1 20 2. + <_> + + <_> + 18 11 6 12 -1. + <_> + 20 11 2 12 3. + <_> + + <_> + 0 11 6 12 -1. + <_> + 2 11 2 12 3. + <_> + + <_> + 3 6 18 14 -1. + <_> + 3 13 18 7 2. + <_> + + <_> + 6 10 7 8 -1. + <_> + 6 14 7 4 2. + <_> + + <_> + 7 9 12 12 -1. + <_> + 7 13 12 4 3. + <_> + + <_> + 2 18 18 5 -1. + <_> + 11 18 9 5 2. + <_> + + <_> + 4 21 20 3 -1. + <_> + 4 22 20 1 3. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 12 3 6 2. + <_> + 12 18 3 6 2. + <_> + + <_> + 4 6 18 3 -1. + <_> + 4 7 18 1 3. 
+ <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 2 12 9 6 -1. + <_> + 2 14 9 2 3. + <_> + + <_> + 4 14 18 4 -1. + <_> + 13 14 9 2 2. + <_> + 4 16 9 2 2. + <_> + + <_> + 7 7 6 14 -1. + <_> + 7 7 3 7 2. + <_> + 10 14 3 7 2. + <_> + + <_> + 7 13 12 6 -1. + <_> + 13 13 6 3 2. + <_> + 7 16 6 3 2. + <_> + + <_> + 6 7 12 9 -1. + <_> + 10 7 4 9 3. + <_> + + <_> + 12 12 6 6 -1. + <_> + 12 12 3 6 2. + <_> + + <_> + 0 2 4 10 -1. + <_> + 0 7 4 5 2. + <_> + + <_> + 8 0 9 6 -1. + <_> + 11 0 3 6 3. + <_> + + <_> + 2 9 12 6 -1. + <_> + 2 12 12 3 2. + <_> + + <_> + 13 10 6 9 -1. + <_> + 13 13 6 3 3. + <_> + + <_> + 5 10 6 9 -1. + <_> + 5 13 6 3 3. + <_> + + <_> + 9 15 9 6 -1. + <_> + 9 17 9 2 3. + <_> + + <_> + 5 16 12 6 -1. + <_> + 5 19 12 3 2. + <_> + + <_> + 3 2 20 3 -1. + <_> + 3 3 20 1 3. + <_> + + <_> + 2 5 12 6 -1. + <_> + 6 5 4 6 3. + <_> + + <_> + 11 0 3 24 -1. + <_> + 12 0 1 24 3. + <_> + + <_> + 3 16 15 4 -1. + <_> + 8 16 5 4 3. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 18 6 6 2. + <_> + + <_> + 1 15 12 8 -1. + <_> + 1 15 6 4 2. + <_> + 7 19 6 4 2. + <_> + + <_> + 15 10 8 14 -1. + <_> + 19 10 4 7 2. + <_> + 15 17 4 7 2. + <_> + + <_> + 1 9 8 14 -1. + <_> + 1 9 4 7 2. + <_> + 5 16 4 7 2. + <_> + + <_> + 9 11 9 10 -1. + <_> + 9 16 9 5 2. + <_> + + <_> + 6 7 12 6 -1. + <_> + 6 9 12 2 3. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 7 8 9 7 -1. + <_> + 10 8 3 7 3. + <_> + + <_> + 10 4 8 10 -1. + <_> + 14 4 4 5 2. + <_> + 10 9 4 5 2. + <_> + + <_> + 4 6 6 9 -1. + <_> + 4 9 6 3 3. + <_> + + <_> + 0 6 24 12 -1. + <_> + 8 6 8 12 3. + <_> + + <_> + 3 7 6 14 -1. + <_> + 6 7 3 14 2. + <_> + + <_> + 19 8 5 8 -1. + <_> + 19 12 5 4 2. + <_> + + <_> + 0 8 5 8 -1. + <_> + 0 12 5 4 2. + <_> + + <_> + 17 3 6 6 -1. + <_> + 17 6 6 3 2. + <_> + + <_> + 1 3 6 6 -1. + <_> + 1 6 6 3 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 3 3 18 6 -1. + <_> + 3 5 18 2 3. + <_> + + <_> + 2 3 9 6 -1. + <_> + 2 5 9 2 3. + <_> + + <_> + 9 3 10 8 -1. + <_> + 14 3 5 4 2. + <_> + 9 7 5 4 2. + <_> + + <_> + 5 3 10 8 -1. + <_> + 5 3 5 4 2. + <_> + 10 7 5 4 2. + <_> + + <_> + 10 11 6 12 -1. + <_> + 10 11 3 12 2. + <_> + + <_> + 8 11 6 11 -1. + <_> + 11 11 3 11 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 9 6 6 7 -1. + <_> + 12 6 3 7 2. + <_> + + <_> + 5 18 18 3 -1. + <_> + 5 19 18 1 3. + <_> + + <_> + 8 4 6 9 -1. + <_> + 10 4 2 9 3. + <_> + + <_> + 8 1 9 7 -1. + <_> + 11 1 3 7 3. + <_> + + <_> + 6 11 6 6 -1. + <_> + 9 11 3 6 2. + <_> + + <_> + 14 12 4 11 -1. + <_> + 14 12 2 11 2. + <_> + + <_> + 6 12 4 11 -1. + <_> + 8 12 2 11 2. + <_> + + <_> + 8 0 12 18 -1. + <_> + 12 0 4 18 3. + <_> + + <_> + 2 12 10 5 -1. + <_> + 7 12 5 5 2. + <_> + + <_> + 2 20 22 3 -1. + <_> + 2 21 22 1 3. + <_> + + <_> + 0 4 2 20 -1. + <_> + 1 4 1 20 2. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 10 10 2 2. + <_> + + <_> + 6 7 8 10 -1. + <_> + 6 7 4 5 2. + <_> + 10 12 4 5 2. + <_> + + <_> + 14 0 6 14 -1. + <_> + 17 0 3 7 2. + <_> + 14 7 3 7 2. + <_> + + <_> + 4 11 5 8 -1. + <_> + 4 15 5 4 2. + <_> + + <_> + 2 0 20 9 -1. + <_> + 2 3 20 3 3. + <_> + + <_> + 6 7 12 8 -1. + <_> + 6 7 6 4 2. + <_> + 12 11 6 4 2. + <_> + + <_> + 9 17 6 6 -1. + <_> + 9 20 6 3 2. + <_> + + <_> + 7 10 10 4 -1. + <_> + 7 12 10 2 2. + <_> + + <_> + 6 5 12 9 -1. + <_> + 10 5 4 9 3. + <_> + + <_> + 5 11 6 8 -1. + <_> + 8 11 3 8 2. + <_> + + <_> + 18 4 4 17 -1. + <_> + 18 4 2 17 2. 
+ <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 18 4 4 17 -1. + <_> + 18 4 2 17 2. + <_> + + <_> + 2 4 4 17 -1. + <_> + 4 4 2 17 2. + <_> + + <_> + 5 18 19 3 -1. + <_> + 5 19 19 1 3. + <_> + + <_> + 11 0 2 18 -1. + <_> + 11 9 2 9 2. + <_> + + <_> + 15 4 2 18 -1. + <_> + 15 13 2 9 2. + <_> + + <_> + 7 4 2 18 -1. + <_> + 7 13 2 9 2. + <_> + + <_> + 7 11 10 8 -1. + <_> + 12 11 5 4 2. + <_> + 7 15 5 4 2. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 2 9 16 8 -1. + <_> + 2 9 8 4 2. + <_> + 10 13 8 4 2. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 8 7 6 9 -1. + <_> + 10 7 2 9 3. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 3 12 12 6 -1. + <_> + 3 14 12 2 3. + <_> + + <_> + 14 12 9 6 -1. + <_> + 14 14 9 2 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 1 14 9 2 3. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 1 7 22 6 -1. + <_> + 1 9 22 2 3. + <_> + + <_> + 18 4 6 6 -1. + <_> + 18 7 6 3 2. + <_> + + <_> + 0 4 6 6 -1. + <_> + 0 7 6 3 2. + <_> + + <_> + 5 11 16 6 -1. + <_> + 5 14 16 3 2. + <_> + + <_> + 6 16 9 4 -1. + <_> + 6 18 9 2 2. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 4 15 6 9 -1. + <_> + 4 18 6 3 3. + <_> + + <_> + 15 1 6 23 -1. + <_> + 17 1 2 23 3. + <_> + + <_> + 0 21 24 3 -1. + <_> + 8 21 8 3 3. + <_> + + <_> + 0 20 24 4 -1. + <_> + 8 20 8 4 3. + <_> + + <_> + 3 1 6 23 -1. + <_> + 5 1 2 23 3. + <_> + + <_> + 3 17 18 3 -1. + <_> + 3 18 18 1 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 1 16 22 4 -1. + <_> + 12 16 11 2 2. + <_> + 1 18 11 2 2. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 2 10 21 3 -1. + <_> + 9 10 7 3 3. + <_> + + <_> + 2 18 12 6 -1. + <_> + 2 18 6 3 2. + <_> + 8 21 6 3 2. + <_> + + <_> + 0 5 24 4 -1. + <_> + 0 7 24 2 2. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 10 7 6 12 -1. + <_> + 10 13 6 6 2. + <_> + + <_> + 6 6 6 9 -1. + <_> + 8 6 2 9 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 9 7 6 9 -1. + <_> + 11 7 2 9 3. + <_> + + <_> + 2 1 20 3 -1. + <_> + 2 2 20 1 3. + <_> + + <_> + 1 18 12 6 -1. + <_> + 1 18 6 3 2. + <_> + 7 21 6 3 2. + <_> + + <_> + 13 2 4 13 -1. + <_> + 13 2 2 13 2. + <_> + + <_> + 6 7 12 4 -1. + <_> + 12 7 6 4 2. + <_> + + <_> + 10 1 4 13 -1. + <_> + 10 1 2 13 2. + <_> + + <_> + 6 0 3 18 -1. + <_> + 7 0 1 18 3. + <_> + + <_> + 14 3 10 5 -1. + <_> + 14 3 5 5 2. + <_> + + <_> + 6 15 12 8 -1. + <_> + 10 15 4 8 3. + <_> + + <_> + 9 10 6 9 -1. + <_> + 11 10 2 9 3. + <_> + + <_> + 8 3 4 9 -1. + <_> + 10 3 2 9 2. + <_> + + <_> + 17 0 6 14 -1. + <_> + 20 0 3 7 2. + <_> + 17 7 3 7 2. + <_> + + <_> + 1 0 6 14 -1. + <_> + 1 0 3 7 2. + <_> + 4 7 3 7 2. + <_> + + <_> + 14 0 6 16 -1. + <_> + 17 0 3 8 2. + <_> + 14 8 3 8 2. + <_> + + <_> + 7 4 4 10 -1. + <_> + 9 4 2 10 2. + <_> + + <_> + 3 17 18 6 -1. + <_> + 12 17 9 3 2. + <_> + 3 20 9 3 2. + <_> + + <_> + 1 20 22 4 -1. + <_> + 12 20 11 4 2. + <_> + + <_> + 14 3 10 5 -1. + <_> + 14 3 5 5 2. + <_> + + <_> + 0 3 10 5 -1. + <_> + 5 3 5 5 2. + <_> + + <_> + 12 6 12 16 -1. + <_> + 16 6 4 16 3. + <_> + + <_> + 0 6 12 16 -1. + <_> + 4 6 4 16 3. + <_> + + <_> + 10 9 5 15 -1. + <_> + 10 14 5 5 3. + <_> + + <_> + 1 18 21 2 -1. + <_> + 1 19 21 1 2. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 6 1 12 4 -1. + <_> + 12 1 6 4 2. + <_> + + <_> + 6 0 12 12 -1. + <_> + 12 0 6 6 2. + <_> + 6 6 6 6 2. + <_> + + <_> + 8 10 8 12 -1. + <_> + 8 10 4 6 2. 
+ <_> + 12 16 4 6 2. + <_> + + <_> + 14 16 10 8 -1. + <_> + 19 16 5 4 2. + <_> + 14 20 5 4 2. + <_> + + <_> + 0 16 10 8 -1. + <_> + 0 16 5 4 2. + <_> + 5 20 5 4 2. + <_> + + <_> + 10 12 12 5 -1. + <_> + 14 12 4 5 3. + <_> + + <_> + 6 16 10 8 -1. + <_> + 6 16 5 4 2. + <_> + 11 20 5 4 2. + <_> + + <_> + 7 6 12 6 -1. + <_> + 13 6 6 3 2. + <_> + 7 9 6 3 2. + <_> + + <_> + 9 6 4 18 -1. + <_> + 9 6 2 9 2. + <_> + 11 15 2 9 2. + <_> + + <_> + 10 9 6 14 -1. + <_> + 13 9 3 7 2. + <_> + 10 16 3 7 2. + <_> + + <_> + 8 9 6 14 -1. + <_> + 8 9 3 7 2. + <_> + 11 16 3 7 2. + <_> + + <_> + 7 4 11 12 -1. + <_> + 7 10 11 6 2. + <_> + + <_> + 4 8 6 16 -1. + <_> + 4 8 3 8 2. + <_> + 7 16 3 8 2. + <_> + + <_> + 17 3 4 21 -1. + <_> + 17 10 4 7 3. + <_> + + <_> + 3 3 4 21 -1. + <_> + 3 10 4 7 3. + <_> + + <_> + 10 1 8 18 -1. + <_> + 14 1 4 9 2. + <_> + 10 10 4 9 2. + <_> + + <_> + 2 5 16 8 -1. + <_> + 2 5 8 4 2. + <_> + 10 9 8 4 2. + <_> + + <_> + 3 6 18 12 -1. + <_> + 3 10 18 4 3. + <_> + + <_> + 4 10 16 12 -1. + <_> + 4 14 16 4 3. + <_> + + <_> + 15 4 8 20 -1. + <_> + 19 4 4 10 2. + <_> + 15 14 4 10 2. + <_> + + <_> + 7 2 9 6 -1. + <_> + 10 2 3 6 3. + <_> + + <_> + 15 4 8 20 -1. + <_> + 19 4 4 10 2. + <_> + 15 14 4 10 2. + <_> + + <_> + 1 4 8 20 -1. + <_> + 1 4 4 10 2. + <_> + 5 14 4 10 2. + <_> + + <_> + 11 8 8 14 -1. + <_> + 15 8 4 7 2. + <_> + 11 15 4 7 2. + <_> + + <_> + 5 8 8 14 -1. + <_> + 5 8 4 7 2. + <_> + 9 15 4 7 2. + <_> + + <_> + 10 13 5 8 -1. + <_> + 10 17 5 4 2. + <_> + + <_> + 4 13 7 9 -1. + <_> + 4 16 7 3 3. + <_> + + <_> + 0 13 24 10 -1. + <_> + 0 18 24 5 2. + <_> + + <_> + 4 2 8 11 -1. + <_> + 8 2 4 11 2. + <_> + + <_> + 10 2 8 16 -1. + <_> + 14 2 4 8 2. + <_> + 10 10 4 8 2. + <_> + + <_> + 0 2 24 6 -1. + <_> + 0 2 12 3 2. + <_> + 12 5 12 3 2. + <_> + + <_> + 6 0 12 9 -1. + <_> + 6 3 12 3 3. + <_> + + <_> + 1 2 12 12 -1. + <_> + 1 2 6 6 2. + <_> + 7 8 6 6 2. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 4 3 8 10 -1. + <_> + 4 3 4 5 2. + <_> + 8 8 4 5 2. + <_> + + <_> + 6 21 18 3 -1. + <_> + 6 22 18 1 3. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 1 10 22 3 -1. + <_> + 1 11 22 1 3. + <_> + + <_> + 2 8 12 9 -1. + <_> + 2 11 12 3 3. + <_> + + <_> + 12 8 12 6 -1. + <_> + 18 8 6 3 2. + <_> + 12 11 6 3 2. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 7 13 9 6 -1. + <_> + 7 15 9 2 3. + <_> + + <_> + 9 8 7 12 -1. + <_> + 9 14 7 6 2. + <_> + + <_> + 4 13 9 6 -1. + <_> + 7 13 3 6 3. + <_> + + <_> + 6 15 18 4 -1. + <_> + 12 15 6 4 3. + <_> + + <_> + 5 4 4 16 -1. + <_> + 7 4 2 16 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 9 11 12 10 -1. + <_> + 15 11 6 5 2. + <_> + 9 16 6 5 2. + <_> + + <_> + 3 6 14 6 -1. + <_> + 3 8 14 2 3. + <_> + + <_> + 4 2 17 8 -1. + <_> + 4 6 17 4 2. + <_> + + <_> + 6 2 12 21 -1. + <_> + 6 9 12 7 3. + <_> + + <_> + 8 1 9 9 -1. + <_> + 8 4 9 3 3. + <_> + + <_> + 0 7 24 3 -1. + <_> + 12 7 12 3 2. + <_> + + <_> + 11 6 9 10 -1. + <_> + 11 11 9 5 2. + <_> + + <_> + 2 11 18 3 -1. + <_> + 2 12 18 1 3. + <_> + + <_> + 8 16 9 4 -1. + <_> + 8 18 9 2 2. + <_> + + <_> + 0 0 9 6 -1. + <_> + 0 2 9 2 3. + <_> + + <_> + 0 11 24 6 -1. + <_> + 0 13 24 2 3. + <_> + + <_> + 2 9 20 6 -1. + <_> + 2 12 20 3 2. + <_> + + <_> + 4 5 16 12 -1. + <_> + 12 5 8 6 2. + <_> + 4 11 8 6 2. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 7 3 10 4 -1. + <_> + 7 5 10 2 2. 
+ <_> + + <_> + 9 15 6 8 -1. + <_> + 9 19 6 4 2. + <_> + + <_> + 17 0 7 10 -1. + <_> + 17 5 7 5 2. + <_> + + <_> + 0 0 7 10 -1. + <_> + 0 5 7 5 2. + <_> + + <_> + 16 1 6 12 -1. + <_> + 19 1 3 6 2. + <_> + 16 7 3 6 2. + <_> + + <_> + 1 0 19 8 -1. + <_> + 1 4 19 4 2. + <_> + + <_> + 12 2 9 4 -1. + <_> + 12 4 9 2 2. + <_> + + <_> + 3 2 9 4 -1. + <_> + 3 4 9 2 2. + <_> + + <_> + 12 2 10 6 -1. + <_> + 12 4 10 2 3. + <_> + + <_> + 3 4 18 2 -1. + <_> + 12 4 9 2 2. + <_> + + <_> + 12 1 4 9 -1. + <_> + 12 1 2 9 2. + <_> + + <_> + 8 1 4 9 -1. + <_> + 10 1 2 9 2. + <_> + + <_> + 10 5 8 10 -1. + <_> + 14 5 4 5 2. + <_> + 10 10 4 5 2. + <_> + + <_> + 6 4 12 13 -1. + <_> + 10 4 4 13 3. + <_> + + <_> + 13 5 6 6 -1. + <_> + 13 5 3 6 2. + <_> + + <_> + 1 5 12 3 -1. + <_> + 7 5 6 3 2. + <_> + + <_> + 7 5 10 6 -1. + <_> + 7 7 10 2 3. + <_> + + <_> + 2 0 21 5 -1. + <_> + 9 0 7 5 3. + <_> + + <_> + 0 8 9 9 -1. + <_> + 0 11 9 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 3 6 7 -1. + <_> + 3 3 3 7 2. + <_> + + <_> + 9 18 12 6 -1. + <_> + 15 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 2 8 20 6 -1. + <_> + 2 8 10 3 2. + <_> + 12 11 10 3 2. + <_> + + <_> + 13 2 10 4 -1. + <_> + 13 4 10 2 2. + <_> + + <_> + 4 5 5 18 -1. + <_> + 4 11 5 6 3. + <_> + + <_> + 20 4 4 9 -1. + <_> + 20 4 2 9 2. + <_> + + <_> + 8 6 8 14 -1. + <_> + 8 13 8 7 2. + <_> + + <_> + 0 1 24 6 -1. + <_> + 12 1 12 3 2. + <_> + 0 4 12 3 2. + <_> + + <_> + 0 4 4 9 -1. + <_> + 2 4 2 9 2. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 3 17 16 6 -1. + <_> + 3 19 16 2 3. + <_> + + <_> + 13 6 6 9 -1. + <_> + 13 9 6 3 3. + <_> + + <_> + 5 6 14 6 -1. + <_> + 5 6 7 3 2. + <_> + 12 9 7 3 2. + <_> + + <_> + 13 5 8 10 -1. + <_> + 17 5 4 5 2. + <_> + 13 10 4 5 2. + <_> + + <_> + 2 2 20 3 -1. + <_> + 2 3 20 1 3. + <_> + + <_> + 9 2 9 6 -1. + <_> + 12 2 3 6 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 12 3 4 11 -1. + <_> + 12 3 2 11 2. + <_> + + <_> + 8 3 4 11 -1. + <_> + 10 3 2 11 2. + <_> + + <_> + 8 3 8 10 -1. + <_> + 12 3 4 5 2. + <_> + 8 8 4 5 2. + <_> + + <_> + 11 1 2 18 -1. + <_> + 12 1 1 18 2. + <_> + + <_> + 9 2 9 6 -1. + <_> + 12 2 3 6 3. + <_> + + <_> + 0 2 19 3 -1. + <_> + 0 3 19 1 3. + <_> + + <_> + 9 14 9 6 -1. + <_> + 9 16 9 2 3. + <_> + + <_> + 1 8 18 5 -1. + <_> + 7 8 6 5 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 13 6 4 15 -1. + <_> + 13 11 4 5 3. + <_> + + <_> + 1 5 18 3 -1. + <_> + 1 6 18 1 3. + <_> + + <_> + 9 7 14 6 -1. + <_> + 9 9 14 2 3. + <_> + + <_> + 2 16 18 3 -1. + <_> + 2 17 18 1 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 9 13 7 8 -1. + <_> + 9 17 7 4 2. + <_> + + <_> + 2 17 20 3 -1. + <_> + 2 18 20 1 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 4 0 15 4 -1. + <_> + 4 2 15 2 2. + <_> + + <_> + 17 2 6 6 -1. + <_> + 17 5 6 3 2. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 17 9 6 -1. + <_> + 0 19 9 2 3. + <_> + + <_> + 9 18 12 6 -1. + <_> + 15 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 3 15 6 9 -1. + <_> + 3 18 6 3 3. + <_> + + <_> + 16 13 8 10 -1. + <_> + 20 13 4 5 2. + <_> + 16 18 4 5 2. + <_> + + <_> + 0 14 24 4 -1. + <_> + 8 14 8 4 3. + <_> + + <_> + 13 18 6 6 -1. + <_> + 13 18 3 6 2. + <_> + + <_> + 0 13 8 10 -1. + <_> + 0 13 4 5 2. + <_> + 4 18 4 5 2. + <_> + + <_> + 0 14 24 6 -1. + <_> + 0 17 24 3 2. 
+ <_> + + <_> + 5 2 12 8 -1. + <_> + 5 2 6 4 2. + <_> + 11 6 6 4 2. + <_> + + <_> + 8 9 9 6 -1. + <_> + 11 9 3 6 3. + <_> + + <_> + 4 3 16 4 -1. + <_> + 4 5 16 2 2. + <_> + + <_> + 10 2 4 10 -1. + <_> + 10 7 4 5 2. + <_> + + <_> + 8 4 5 8 -1. + <_> + 8 8 5 4 2. + <_> + + <_> + 11 5 9 12 -1. + <_> + 11 9 9 4 3. + <_> + + <_> + 4 5 9 12 -1. + <_> + 4 9 9 4 3. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 2 4 20 12 -1. + <_> + 2 8 20 4 3. + <_> + + <_> + 4 4 17 16 -1. + <_> + 4 12 17 8 2. + <_> + + <_> + 8 7 7 6 -1. + <_> + 8 10 7 3 2. + <_> + + <_> + 1 9 23 2 -1. + <_> + 1 10 23 1 2. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 13 3 4 9 -1. + <_> + 13 3 2 9 2. + <_> + + <_> + 8 1 6 13 -1. + <_> + 10 1 2 13 3. + <_> + + <_> + 4 22 18 2 -1. + <_> + 4 23 18 1 2. + <_> + + <_> + 3 10 9 6 -1. + <_> + 6 10 3 6 3. + <_> + + <_> + 14 0 2 24 -1. + <_> + 14 0 1 24 2. + <_> + + <_> + 8 0 2 24 -1. + <_> + 9 0 1 24 2. + <_> + + <_> + 3 2 18 10 -1. + <_> + 9 2 6 10 3. + <_> + + <_> + 4 13 15 6 -1. + <_> + 9 13 5 6 3. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 9 1 4 11 -1. + <_> + 11 1 2 11 2. + <_> + + <_> + 9 7 10 4 -1. + <_> + 9 7 5 4 2. + <_> + + <_> + 7 0 10 18 -1. + <_> + 12 0 5 18 2. + <_> + + <_> + 12 1 6 16 -1. + <_> + 14 1 2 16 3. + <_> + + <_> + 6 1 6 16 -1. + <_> + 8 1 2 16 3. + <_> + + <_> + 18 2 6 6 -1. + <_> + 18 5 6 3 2. + <_> + + <_> + 3 5 18 2 -1. + <_> + 3 6 18 1 2. + <_> + + <_> + 18 2 6 6 -1. + <_> + 18 5 6 3 2. + <_> + + <_> + 0 2 6 6 -1. + <_> + 0 5 6 3 2. + <_> + + <_> + 13 11 11 6 -1. + <_> + 13 13 11 2 3. + <_> + + <_> + 5 7 10 4 -1. + <_> + 10 7 5 4 2. + <_> + + <_> + 11 9 10 7 -1. + <_> + 11 9 5 7 2. + <_> + + <_> + 3 9 10 7 -1. + <_> + 8 9 5 7 2. + <_> + + <_> + 16 4 6 6 -1. + <_> + 16 4 3 6 2. + <_> + + <_> + 5 6 10 8 -1. + <_> + 5 6 5 4 2. + <_> + 10 10 5 4 2. + <_> + + <_> + 7 21 16 3 -1. + <_> + 7 21 8 3 2. + <_> + + <_> + 1 21 16 3 -1. + <_> + 9 21 8 3 2. + <_> + + <_> + 2 5 22 14 -1. + <_> + 13 5 11 7 2. + <_> + 2 12 11 7 2. + <_> + + <_> + 3 10 8 10 -1. + <_> + 3 10 4 5 2. + <_> + 7 15 4 5 2. + <_> + + <_> + 17 0 6 12 -1. + <_> + 20 0 3 6 2. + <_> + 17 6 3 6 2. + <_> + + <_> + 5 2 6 18 -1. + <_> + 7 2 2 18 3. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 0 12 7 9 -1. + <_> + 0 15 7 3 3. + <_> + + <_> + 15 13 8 10 -1. + <_> + 19 13 4 5 2. + <_> + 15 18 4 5 2. + <_> + + <_> + 1 0 6 12 -1. + <_> + 1 0 3 6 2. + <_> + 4 6 3 6 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 1 13 8 10 -1. + <_> + 1 13 4 5 2. + <_> + 5 18 4 5 2. + <_> + + <_> + 3 21 19 2 -1. + <_> + 3 22 19 1 2. + <_> + + <_> + 6 3 4 13 -1. + <_> + 8 3 2 13 2. + <_> + + <_> + 5 10 18 3 -1. + <_> + 5 11 18 1 3. + <_> + + <_> + 9 3 5 12 -1. + <_> + 9 7 5 4 3. + <_> + + <_> + 11 2 4 15 -1. + <_> + 11 7 4 5 3. + <_> + + <_> + 4 1 16 4 -1. + <_> + 4 3 16 2 2. + <_> + + <_> + 6 0 18 3 -1. + <_> + 6 1 18 1 3. + <_> + + <_> + 5 1 10 8 -1. + <_> + 5 1 5 4 2. + <_> + 10 5 5 4 2. + <_> + + <_> + 11 18 12 6 -1. + <_> + 17 18 6 3 2. + <_> + 11 21 6 3 2. + <_> + + <_> + 5 15 12 3 -1. + <_> + 11 15 6 3 2. + <_> + + <_> + 1 10 22 4 -1. + <_> + 1 10 11 4 2. + <_> + + <_> + 7 9 9 6 -1. + <_> + 10 9 3 6 3. + <_> + + <_> + 6 11 12 5 -1. + <_> + 10 11 4 5 3. + <_> + + <_> + 6 7 10 7 -1. + <_> + 11 7 5 7 2. + <_> + + <_> + 11 2 8 10 -1. + <_> + 11 2 4 10 2. + <_> + + <_> + 5 2 8 10 -1. + <_> + 9 2 4 10 2. + <_> + + <_> + 6 4 18 6 -1. + <_> + 15 4 9 3 2. + <_> + 6 7 9 3 2. + <_> + + <_> + 0 5 10 9 -1. + <_> + 0 8 10 3 3. 
+ <_> + + <_> + 2 7 21 6 -1. + <_> + 2 9 21 2 3. + <_> + + <_> + 0 4 22 16 -1. + <_> + 0 4 11 8 2. + <_> + 11 12 11 8 2. + <_> + + <_> + 9 0 6 22 -1. + <_> + 9 11 6 11 2. + <_> + + <_> + 9 1 3 12 -1. + <_> + 9 7 3 6 2. + <_> + + <_> + 12 0 12 18 -1. + <_> + 18 0 6 9 2. + <_> + 12 9 6 9 2. + <_> + + <_> + 0 0 12 18 -1. + <_> + 0 0 6 9 2. + <_> + 6 9 6 9 2. + <_> + + <_> + 1 1 22 4 -1. + <_> + 12 1 11 2 2. + <_> + 1 3 11 2 2. + <_> + + <_> + 3 0 18 4 -1. + <_> + 3 2 18 2 2. + <_> + + <_> + 2 5 22 6 -1. + <_> + 2 7 22 2 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 5 3 6 3 3. + <_> + + <_> + 10 14 6 9 -1. + <_> + 12 14 2 9 3. + <_> + + <_> + 8 14 6 9 -1. + <_> + 10 14 2 9 3. + <_> + + <_> + 5 18 18 3 -1. + <_> + 5 19 18 1 3. + <_> + + <_> + 6 0 6 13 -1. + <_> + 9 0 3 13 2. + <_> + + <_> + 7 4 12 4 -1. + <_> + 7 4 6 4 2. + <_> + + <_> + 5 2 12 6 -1. + <_> + 9 2 4 6 3. + <_> + + <_> + 4 1 18 3 -1. + <_> + 4 2 18 1 3. + <_> + + <_> + 0 8 6 12 -1. + <_> + 0 12 6 4 3. + <_> + + <_> + 9 15 6 9 -1. + <_> + 11 15 2 9 3. + <_> + + <_> + 9 10 6 13 -1. + <_> + 11 10 2 13 3. + <_> + + <_> + 6 17 18 2 -1. + <_> + 6 18 18 1 2. + <_> + + <_> + 9 4 6 9 -1. + <_> + 11 4 2 9 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 5 6 10 8 -1. + <_> + 5 6 5 4 2. + <_> + 10 10 5 4 2. + <_> + + <_> + 14 9 5 8 -1. + <_> + 14 13 5 4 2. + <_> + + <_> + 5 9 5 8 -1. + <_> + 5 13 5 4 2. + <_> + + <_> + 14 11 9 6 -1. + <_> + 14 13 9 2 3. + <_> + + <_> + 0 2 23 15 -1. + <_> + 0 7 23 5 3. + <_> + + <_> + 16 0 8 12 -1. + <_> + 16 6 8 6 2. + <_> + + <_> + 4 15 6 9 -1. + <_> + 4 18 6 3 3. + <_> + + <_> + 8 18 9 4 -1. + <_> + 8 20 9 2 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 13 11 11 6 -1. + <_> + 13 13 11 2 3. + <_> + + <_> + 0 11 11 6 -1. + <_> + 0 13 11 2 3. + <_> + + <_> + 0 9 24 6 -1. + <_> + 12 9 12 3 2. + <_> + 0 12 12 3 2. + <_> + + <_> + 6 16 8 8 -1. + <_> + 6 20 8 4 2. + <_> + + <_> + 10 16 14 6 -1. + <_> + 10 18 14 2 3. + <_> + + <_> + 1 1 21 3 -1. + <_> + 1 2 21 1 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 0 2 12 3 2. + <_> + + <_> + 2 15 8 5 -1. + <_> + 6 15 4 5 2. + <_> + + <_> + 2 11 21 3 -1. + <_> + 9 11 7 3 3. + <_> + + <_> + 1 18 12 6 -1. + <_> + 1 18 6 3 2. + <_> + 7 21 6 3 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 7 7 4 10 -1. + <_> + 7 12 4 5 2. + <_> + + <_> + 9 8 6 12 -1. + <_> + 9 12 6 4 3. + <_> + + <_> + 7 1 9 6 -1. + <_> + 10 1 3 6 3. + <_> + + <_> + 3 14 19 2 -1. + <_> + 3 15 19 1 2. + <_> + + <_> + 7 7 10 10 -1. + <_> + 7 7 5 5 2. + <_> + 12 12 5 5 2. + <_> + + <_> + 3 12 18 12 -1. + <_> + 3 12 9 12 2. + <_> + + <_> + 8 0 6 12 -1. + <_> + 10 0 2 12 3. + <_> + + <_> + 3 0 17 9 -1. + <_> + 3 3 17 3 3. + <_> + + <_> + 6 0 12 11 -1. + <_> + 10 0 4 11 3. + <_> + + <_> + 1 0 6 13 -1. + <_> + 4 0 3 13 2. + <_> + + <_> + 5 8 16 6 -1. + <_> + 5 11 16 3 2. + <_> + + <_> + 8 8 5 12 -1. + <_> + 8 14 5 6 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 2 0 20 3 -1. + <_> + 2 1 20 1 3. + <_> + + <_> + 4 6 15 10 -1. + <_> + 9 6 5 10 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 7 16 9 6 -1. + <_> + 7 18 9 2 3. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 4 0 6 9 -1. + <_> + 6 0 2 9 3. + <_> + + <_> + 17 1 6 16 -1. + <_> + 19 1 2 16 3. + <_> + + <_> + 1 1 6 16 -1. + <_> + 3 1 2 16 3. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. 
+ <_> + + <_> + 0 0 6 9 -1. + <_> + 0 3 6 3 3. + <_> + + <_> + 9 5 6 6 -1. + <_> + 9 5 3 6 2. + <_> + + <_> + 3 10 9 6 -1. + <_> + 6 10 3 6 3. + <_> + + <_> + 14 7 3 16 -1. + <_> + 14 15 3 8 2. + <_> + + <_> + 4 10 14 12 -1. + <_> + 4 10 7 6 2. + <_> + 11 16 7 6 2. + <_> + + <_> + 7 6 12 6 -1. + <_> + 7 8 12 2 3. + <_> + + <_> + 7 2 4 20 -1. + <_> + 9 2 2 20 2. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. + <_> + + <_> + 5 20 14 4 -1. + <_> + 5 22 14 2 2. + <_> + + <_> + 4 4 16 12 -1. + <_> + 4 10 16 6 2. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 3 0 21 4 -1. + <_> + 3 2 21 2 2. + <_> + + <_> + 4 13 6 9 -1. + <_> + 4 16 6 3 3. + <_> + + <_> + 16 16 5 8 -1. + <_> + 16 20 5 4 2. + <_> + + <_> + 4 0 16 16 -1. + <_> + 4 0 8 8 2. + <_> + 12 8 8 8 2. + <_> + + <_> + 6 6 14 6 -1. + <_> + 13 6 7 3 2. + <_> + 6 9 7 3 2. + <_> + + <_> + 10 5 4 15 -1. + <_> + 10 10 4 5 3. + <_> + + <_> + 9 15 12 8 -1. + <_> + 15 15 6 4 2. + <_> + 9 19 6 4 2. + <_> + + <_> + 6 7 12 4 -1. + <_> + 12 7 6 4 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 12 6 7 3 2. + <_> + 5 9 7 3 2. + <_> + + <_> + 3 6 18 10 -1. + <_> + 3 6 9 5 2. + <_> + 12 11 9 5 2. + <_> + + <_> + 6 0 18 21 -1. + <_> + 12 0 6 21 3. + <_> + + <_> + 0 0 24 21 -1. + <_> + 8 0 8 21 3. + <_> + + <_> + 6 18 18 3 -1. + <_> + 6 19 18 1 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 4 3 19 2 -1. + <_> + 4 4 19 1 2. + <_> + + <_> + 0 3 24 2 -1. + <_> + 0 4 24 1 2. + <_> + + <_> + 15 14 9 4 -1. + <_> + 15 16 9 2 2. + <_> + + <_> + 0 14 9 4 -1. + <_> + 0 16 9 2 2. + <_> + + <_> + 6 15 18 2 -1. + <_> + 6 16 18 1 2. + <_> + + <_> + 3 17 18 3 -1. + <_> + 3 18 18 1 3. + <_> + + <_> + 12 0 3 23 -1. + <_> + 13 0 1 23 3. + <_> + + <_> + 6 0 8 6 -1. + <_> + 6 3 8 3 2. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 9 0 3 23 -1. + <_> + 10 0 1 23 3. + <_> + + <_> + 10 7 4 10 -1. + <_> + 10 12 4 5 2. + <_> + + <_> + 7 8 10 12 -1. + <_> + 7 12 10 4 3. + <_> + + <_> + 14 9 6 14 -1. + <_> + 17 9 3 7 2. + <_> + 14 16 3 7 2. + <_> + + <_> + 2 0 10 9 -1. + <_> + 2 3 10 3 3. + <_> + + <_> + 11 1 5 12 -1. + <_> + 11 7 5 6 2. + <_> + + <_> + 1 4 12 10 -1. + <_> + 1 4 6 5 2. + <_> + 7 9 6 5 2. + <_> + + <_> + 15 1 9 4 -1. + <_> + 15 3 9 2 2. + <_> + + <_> + 1 2 8 10 -1. + <_> + 1 2 4 5 2. + <_> + 5 7 4 5 2. + <_> + + <_> + 10 1 5 12 -1. + <_> + 10 5 5 4 3. + <_> + + <_> + 4 0 14 24 -1. + <_> + 11 0 7 24 2. + <_> + + <_> + 7 17 10 4 -1. + <_> + 7 19 10 2 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 13 15 6 9 -1. + <_> + 15 15 2 9 3. + <_> + + <_> + 3 21 18 3 -1. + <_> + 3 22 18 1 3. + <_> + + <_> + 13 15 6 9 -1. + <_> + 15 15 2 9 3. + <_> + + <_> + 5 15 6 9 -1. + <_> + 7 15 2 9 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 7 3 6 11 -1. + <_> + 9 3 2 11 3. + <_> + + <_> + 15 1 9 4 -1. + <_> + 15 3 9 2 2. + <_> + + <_> + 5 4 14 8 -1. + <_> + 5 8 14 4 2. + <_> + + <_> + 8 1 15 9 -1. + <_> + 8 4 15 3 3. + <_> + + <_> + 7 2 8 10 -1. + <_> + 7 2 4 5 2. + <_> + 11 7 4 5 2. + <_> + + <_> + 12 2 6 12 -1. + <_> + 12 2 3 12 2. + <_> + + <_> + 6 2 6 12 -1. + <_> + 9 2 3 12 2. + <_> + + <_> + 7 7 12 4 -1. + <_> + 7 7 6 4 2. + <_> + + <_> + 6 3 12 10 -1. + <_> + 10 3 4 10 3. + <_> + + <_> + 5 6 16 6 -1. + <_> + 13 6 8 3 2. + <_> + 5 9 8 3 2. + <_> + + <_> + 3 1 18 9 -1. + <_> + 9 1 6 9 3. + <_> + + <_> + 3 8 18 5 -1. + <_> + 9 8 6 5 3. + <_> + + <_> + 0 0 24 22 -1. 
+ <_> + 0 0 12 11 2. + <_> + 12 11 12 11 2. + <_> + + <_> + 14 16 9 6 -1. + <_> + 14 18 9 2 3. + <_> + + <_> + 0 16 24 8 -1. + <_> + 0 20 24 4 2. + <_> + + <_> + 1 19 22 4 -1. + <_> + 12 19 11 2 2. + <_> + 1 21 11 2 2. + <_> + + <_> + 1 16 9 6 -1. + <_> + 1 18 9 2 3. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 9 15 6 9 -1. + <_> + 11 15 2 9 3. + <_> + + <_> + 10 18 12 6 -1. + <_> + 16 18 6 3 2. + <_> + 10 21 6 3 2. + <_> + + <_> + 2 18 12 6 -1. + <_> + 2 18 6 3 2. + <_> + 8 21 6 3 2. + <_> + + <_> + 8 3 16 9 -1. + <_> + 8 6 16 3 3. + <_> + + <_> + 0 5 10 6 -1. + <_> + 0 7 10 2 3. + <_> + + <_> + 5 5 18 3 -1. + <_> + 5 6 18 1 3. + <_> + + <_> + 2 6 9 6 -1. + <_> + 2 9 9 3 2. + <_> + + <_> + 14 2 10 9 -1. + <_> + 14 5 10 3 3. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 9 2 15 6 -1. + <_> + 9 4 15 2 3. + <_> + + <_> + 4 8 15 6 -1. + <_> + 4 10 15 2 3. + <_> + + <_> + 0 5 24 4 -1. + <_> + 12 5 12 2 2. + <_> + 0 7 12 2 2. + <_> + + <_> + 7 8 6 12 -1. + <_> + 9 8 2 12 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 0 12 6 12 -1. + <_> + 0 12 3 6 2. + <_> + 3 18 3 6 2. + <_> + + <_> + 14 12 10 6 -1. + <_> + 14 14 10 2 3. + <_> + + <_> + 2 7 18 9 -1. + <_> + 2 10 18 3 3. + <_> + + <_> + 11 14 10 9 -1. + <_> + 11 17 10 3 3. + <_> + + <_> + 7 6 10 8 -1. + <_> + 7 6 5 4 2. + <_> + 12 10 5 4 2. + <_> + + <_> + 6 6 14 6 -1. + <_> + 13 6 7 3 2. + <_> + 6 9 7 3 2. + <_> + + <_> + 4 13 9 7 -1. + <_> + 7 13 3 7 3. + <_> + + <_> + 14 10 6 12 -1. + <_> + 17 10 3 6 2. + <_> + 14 16 3 6 2. + <_> + + <_> + 4 10 6 12 -1. + <_> + 4 10 3 6 2. + <_> + 7 16 3 6 2. + <_> + + <_> + 13 9 8 6 -1. + <_> + 13 9 4 6 2. + <_> + + <_> + 8 3 4 14 -1. + <_> + 10 3 2 14 2. + <_> + + <_> + 17 0 3 18 -1. + <_> + 18 0 1 18 3. + <_> + + <_> + 4 12 16 12 -1. + <_> + 12 12 8 12 2. + <_> + + <_> + 15 0 6 14 -1. + <_> + 17 0 2 14 3. + <_> + + <_> + 3 0 6 14 -1. + <_> + 5 0 2 14 3. + <_> + + <_> + 12 2 12 20 -1. + <_> + 16 2 4 20 3. + <_> + + <_> + 0 2 12 20 -1. + <_> + 4 2 4 20 3. + <_> + + <_> + 16 0 6 17 -1. + <_> + 18 0 2 17 3. + <_> + + <_> + 2 0 6 17 -1. + <_> + 4 0 2 17 3. + <_> + + <_> + 15 6 9 6 -1. + <_> + 15 8 9 2 3. + <_> + + <_> + 0 6 9 6 -1. + <_> + 0 8 9 2 3. + <_> + + <_> + 18 1 6 13 -1. + <_> + 20 1 2 13 3. + <_> + + <_> + 0 1 6 13 -1. + <_> + 2 1 2 13 3. + <_> + + <_> + 16 0 4 9 -1. + <_> + 16 0 2 9 2. + <_> + + <_> + 5 10 12 7 -1. + <_> + 9 10 4 7 3. + <_> + + <_> + 12 9 12 6 -1. + <_> + 12 11 12 2 3. + <_> + + <_> + 0 9 12 6 -1. + <_> + 0 11 12 2 3. + <_> + + <_> + 5 7 14 9 -1. + <_> + 5 10 14 3 3. + <_> + + <_> + 0 15 20 3 -1. + <_> + 0 16 20 1 3. + <_> + + <_> + 8 10 8 10 -1. + <_> + 12 10 4 5 2. + <_> + 8 15 4 5 2. + <_> + + <_> + 5 4 13 9 -1. + <_> + 5 7 13 3 3. + <_> + + <_> + 10 2 6 18 -1. + <_> + 10 8 6 6 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 6 9 12 4 -1. + <_> + 6 11 12 2 2. + <_> + + <_> + 3 2 15 12 -1. + <_> + 3 6 15 4 3. + <_> + + <_> + 12 0 12 5 -1. + <_> + 16 0 4 5 3. + <_> + + <_> + 0 15 18 3 -1. + <_> + 6 15 6 3 3. + <_> + + <_> + 0 14 24 5 -1. + <_> + 8 14 8 5 3. + <_> + + <_> + 5 1 3 18 -1. + <_> + 6 1 1 18 3. + <_> + + <_> + 10 0 4 14 -1. + <_> + 10 0 2 14 2. + <_> + + <_> + 9 3 4 9 -1. + <_> + 11 3 2 9 2. + <_> + + <_> + 8 2 12 6 -1. + <_> + 14 2 6 3 2. + <_> + 8 5 6 3 2. + <_> + + <_> + 0 4 17 4 -1. + <_> + 0 6 17 2 2. + <_> + + <_> + 16 16 5 8 -1. + <_> + 16 20 5 4 2. + <_> + + <_> + 3 16 5 8 -1. + <_> + 3 20 5 4 2. + <_> + + <_> + 6 18 18 2 -1. + <_> + 6 19 18 1 2. + <_> + + <_> + 0 0 12 5 -1. 
+ <_> + 4 0 4 5 3. + <_> + + <_> + 14 3 6 12 -1. + <_> + 17 3 3 6 2. + <_> + 14 9 3 6 2. + <_> + + <_> + 0 12 6 12 -1. + <_> + 2 12 2 12 3. + <_> + + <_> + 2 3 21 3 -1. + <_> + 2 4 21 1 3. + <_> + + <_> + 4 3 6 12 -1. + <_> + 4 3 3 6 2. + <_> + 7 9 3 6 2. + <_> + + <_> + 12 8 12 6 -1. + <_> + 18 8 6 3 2. + <_> + 12 11 6 3 2. + <_> + + <_> + 0 15 16 9 -1. + <_> + 8 15 8 9 2. + <_> + + <_> + 6 13 18 5 -1. + <_> + 6 13 9 5 2. + <_> + + <_> + 1 6 15 6 -1. + <_> + 6 6 5 6 3. + <_> + + <_> + 11 9 9 6 -1. + <_> + 14 9 3 6 3. + <_> + + <_> + 3 0 15 11 -1. + <_> + 8 0 5 11 3. + <_> + + <_> + 15 3 3 18 -1. + <_> + 15 9 3 6 3. + <_> + + <_> + 6 3 3 18 -1. + <_> + 6 9 3 6 3. + <_> + + <_> + 9 5 10 8 -1. + <_> + 14 5 5 4 2. + <_> + 9 9 5 4 2. + <_> + + <_> + 4 4 16 8 -1. + <_> + 4 4 8 4 2. + <_> + 12 8 8 4 2. + <_> + + <_> + 7 7 12 3 -1. + <_> + 7 7 6 3 2. + <_> + + <_> + 5 0 9 13 -1. + <_> + 8 0 3 13 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 8 1 10 9 -1. + <_> + 8 4 10 3 3. + <_> + + <_> + 0 2 18 2 -1. + <_> + 0 3 18 1 2. + <_> + + <_> + 10 13 14 6 -1. + <_> + 17 13 7 3 2. + <_> + 10 16 7 3 2. + <_> + + <_> + 0 13 14 6 -1. + <_> + 0 13 7 3 2. + <_> + 7 16 7 3 2. + <_> + + <_> + 20 2 3 21 -1. + <_> + 21 2 1 21 3. + <_> + + <_> + 0 9 5 12 -1. + <_> + 0 13 5 4 3. + <_> + + <_> + 12 6 12 6 -1. + <_> + 12 8 12 2 3. + <_> + + <_> + 1 8 20 3 -1. + <_> + 1 9 20 1 3. + <_> + + <_> + 5 7 19 3 -1. + <_> + 5 8 19 1 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 1 14 9 2 3. + <_> + + <_> + 6 10 14 12 -1. + <_> + 6 14 14 4 3. + <_> + + <_> + 5 6 14 18 -1. + <_> + 5 12 14 6 3. + <_> + + <_> + 11 12 9 7 -1. + <_> + 14 12 3 7 3. + <_> + + <_> + 1 15 18 4 -1. + <_> + 1 17 18 2 2. + <_> + + <_> + 11 14 6 9 -1. + <_> + 11 17 6 3 3. + <_> + + <_> + 0 8 18 4 -1. + <_> + 0 8 9 2 2. + <_> + 9 10 9 2 2. + <_> + + <_> + 3 10 20 6 -1. + <_> + 13 10 10 3 2. + <_> + 3 13 10 3 2. + <_> + + <_> + 1 10 20 6 -1. + <_> + 1 10 10 3 2. + <_> + 11 13 10 3 2. + <_> + + <_> + 0 9 24 2 -1. + <_> + 0 9 12 2 2. + <_> + + <_> + 1 12 20 8 -1. + <_> + 1 12 10 4 2. + <_> + 11 16 10 4 2. + <_> + + <_> + 11 12 9 7 -1. + <_> + 14 12 3 7 3. + <_> + + <_> + 4 12 9 7 -1. + <_> + 7 12 3 7 3. + <_> + + <_> + 12 12 8 5 -1. + <_> + 12 12 4 5 2. + <_> + + <_> + 4 12 8 5 -1. + <_> + 8 12 4 5 2. + <_> + + <_> + 13 10 4 10 -1. + <_> + 13 10 2 10 2. + <_> + + <_> + 1 15 20 2 -1. + <_> + 11 15 10 2 2. + <_> + + <_> + 9 10 6 6 -1. + <_> + 9 10 3 6 2. + <_> + + <_> + 0 1 21 3 -1. + <_> + 7 1 7 3 3. + <_> + + <_> + 6 4 13 9 -1. + <_> + 6 7 13 3 3. + <_> + + <_> + 6 5 12 5 -1. + <_> + 10 5 4 5 3. + <_> + + <_> + 10 10 10 6 -1. + <_> + 10 12 10 2 3. + <_> + + <_> + 6 12 5 8 -1. + <_> + 6 16 5 4 2. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 2 10 18 6 -1. + <_> + 8 10 6 6 3. + <_> + + <_> + 11 2 9 4 -1. + <_> + 11 4 9 2 2. + <_> + + <_> + 1 20 21 3 -1. + <_> + 8 20 7 3 3. + <_> + + <_> + 1 10 22 2 -1. + <_> + 1 11 22 1 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 18 2 6 20 -1. + <_> + 20 2 2 20 3. + <_> + + <_> + 0 2 6 20 -1. + <_> + 2 2 2 20 3. + <_> + + <_> + 11 7 6 14 -1. + <_> + 14 7 3 7 2. + <_> + 11 14 3 7 2. + <_> + + <_> + 0 1 4 9 -1. + <_> + 2 1 2 9 2. + <_> + + <_> + 12 14 9 4 -1. + <_> + 12 16 9 2 2. + <_> + + <_> + 1 13 9 4 -1. + <_> + 1 15 9 2 2. + <_> + + <_> + 7 6 15 6 -1. + <_> + 7 8 15 2 3. + <_> + + <_> + 8 2 3 18 -1. + <_> + 8 8 3 6 3. 
+ <_> + + <_> + 6 6 12 6 -1. + <_> + 12 6 6 3 2. + <_> + 6 9 6 3 2. + <_> + + <_> + 2 19 20 4 -1. + <_> + 2 19 10 2 2. + <_> + 12 21 10 2 2. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 3 5 18 14 -1. + <_> + 3 5 9 7 2. + <_> + 12 12 9 7 2. + <_> + + <_> + 15 6 4 18 -1. + <_> + 17 6 2 9 2. + <_> + 15 15 2 9 2. + <_> + + <_> + 5 6 4 18 -1. + <_> + 5 6 2 9 2. + <_> + 7 15 2 9 2. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 11 5 6 9 -1. + <_> + 13 5 2 9 3. + <_> + + <_> + 9 5 6 6 -1. + <_> + 12 5 3 6 2. + <_> + + <_> + 4 1 16 6 -1. + <_> + 12 1 8 3 2. + <_> + 4 4 8 3 2. + <_> + + <_> + 9 13 6 11 -1. + <_> + 11 13 2 11 3. + <_> + + <_> + 17 1 6 12 -1. + <_> + 20 1 3 6 2. + <_> + 17 7 3 6 2. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 7 13 10 8 -1. + <_> + 7 17 10 4 2. + <_> + + <_> + 6 18 10 6 -1. + <_> + 6 20 10 2 3. + <_> + + <_> + 9 14 9 4 -1. + <_> + 9 16 9 2 2. + <_> + + <_> + 1 1 6 12 -1. + <_> + 1 1 3 6 2. + <_> + 4 7 3 6 2. + <_> + + <_> + 19 4 5 12 -1. + <_> + 19 8 5 4 3. + <_> + + <_> + 0 0 8 8 -1. + <_> + 4 0 4 8 2. + <_> + + <_> + 3 5 19 3 -1. + <_> + 3 6 19 1 3. + <_> + + <_> + 1 5 12 6 -1. + <_> + 1 5 6 3 2. + <_> + 7 8 6 3 2. + <_> + + <_> + 2 1 21 8 -1. + <_> + 9 1 7 8 3. + <_> + + <_> + 4 1 16 8 -1. + <_> + 4 5 16 4 2. + <_> + + <_> + 6 0 18 3 -1. + <_> + 6 1 18 1 3. + <_> + + <_> + 4 4 10 14 -1. + <_> + 4 11 10 7 2. + <_> + + <_> + 15 6 4 10 -1. + <_> + 15 11 4 5 2. + <_> + + <_> + 3 18 18 3 -1. + <_> + 9 18 6 3 3. + <_> + + <_> + 8 18 12 6 -1. + <_> + 12 18 4 6 3. + <_> + + <_> + 3 15 6 9 -1. + <_> + 6 15 3 9 2. + <_> + + <_> + 15 7 6 8 -1. + <_> + 15 11 6 4 2. + <_> + + <_> + 3 7 6 8 -1. + <_> + 3 11 6 4 2. + <_> + + <_> + 5 9 18 6 -1. + <_> + 14 9 9 3 2. + <_> + 5 12 9 3 2. + <_> + + <_> + 1 13 12 6 -1. + <_> + 1 15 12 2 3. + <_> + + <_> + 14 15 10 6 -1. + <_> + 14 17 10 2 3. + <_> + + <_> + 0 15 10 6 -1. + <_> + 0 17 10 2 3. + <_> + + <_> + 15 13 6 9 -1. + <_> + 15 16 6 3 3. + <_> + + <_> + 3 13 6 9 -1. + <_> + 3 16 6 3 3. + <_> + + <_> + 9 5 8 8 -1. + <_> + 9 5 4 8 2. + <_> + + <_> + 1 18 12 6 -1. + <_> + 1 18 6 3 2. + <_> + 7 21 6 3 2. + <_> + + <_> + 13 19 10 4 -1. + <_> + 13 21 10 2 2. + <_> + + <_> + 1 19 10 4 -1. + <_> + 1 21 10 2 2. + <_> + + <_> + 6 19 18 3 -1. + <_> + 6 20 18 1 3. + <_> + + <_> + 8 14 4 10 -1. + <_> + 8 19 4 5 2. + <_> + + <_> + 0 0 24 6 -1. + <_> + 0 2 24 2 3. + <_> + + <_> + 0 1 6 9 -1. + <_> + 0 4 6 3 3. + <_> + + <_> + 4 9 20 6 -1. + <_> + 14 9 10 3 2. + <_> + 4 12 10 3 2. + <_> + + <_> + 1 15 19 8 -1. + <_> + 1 19 19 4 2. + <_> + + <_> + 14 0 10 6 -1. + <_> + 14 2 10 2 3. + <_> + + <_> + 1 10 21 14 -1. + <_> + 8 10 7 14 3. + <_> + + <_> + 10 10 8 8 -1. + <_> + 10 10 4 8 2. + <_> + + <_> + 6 8 10 4 -1. + <_> + 11 8 5 4 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 10 5 2 9 2. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 14 4 4 13 -1. + <_> + 14 4 2 13 2. + <_> + + <_> + 6 4 4 13 -1. + <_> + 8 4 2 13 2. + <_> + + <_> + 8 7 9 6 -1. + <_> + 11 7 3 6 3. + <_> + + <_> + 3 6 16 6 -1. + <_> + 3 6 8 3 2. + <_> + 11 9 8 3 2. + <_> + + <_> + 5 4 16 14 -1. + <_> + 13 4 8 7 2. + <_> + 5 11 8 7 2. + <_> + + <_> + 0 0 24 4 -1. + <_> + 0 0 12 2 2. + <_> + 12 2 12 2 2. + <_> + + <_> + 9 1 9 6 -1. + <_> + 12 1 3 6 3. + <_> + + <_> + 4 1 14 4 -1. + <_> + 11 1 7 4 2. + <_> + + <_> + 10 14 7 9 -1. + <_> + 10 17 7 3 3. + <_> + + <_> + 8 3 8 10 -1. + <_> + 8 3 4 5 2. + <_> + 12 8 4 5 2. + <_> + + <_> + 7 3 12 5 -1. + <_> + 11 3 4 5 3. 
+ <_> + + <_> + 8 2 4 13 -1. + <_> + 10 2 2 13 2. + <_> + + <_> + 11 2 3 19 -1. + <_> + 12 2 1 19 3. + <_> + + <_> + 7 7 9 6 -1. + <_> + 10 7 3 6 3. + <_> + + <_> + 4 22 20 2 -1. + <_> + 4 22 10 2 2. + <_> + + <_> + 0 16 24 4 -1. + <_> + 0 16 12 2 2. + <_> + 12 18 12 2 2. + <_> + + <_> + 7 3 12 5 -1. + <_> + 11 3 4 5 3. + <_> + + <_> + 1 10 8 14 -1. + <_> + 1 10 4 7 2. + <_> + 5 17 4 7 2. + <_> + + <_> + 11 16 6 6 -1. + <_> + 11 19 6 3 2. + <_> + + <_> + 6 0 10 24 -1. + <_> + 6 0 5 12 2. + <_> + 11 12 5 12 2. + <_> + + <_> + 7 5 14 14 -1. + <_> + 14 5 7 7 2. + <_> + 7 12 7 7 2. + <_> + + <_> + 7 8 10 8 -1. + <_> + 7 8 5 4 2. + <_> + 12 12 5 4 2. + <_> + + <_> + 9 1 9 6 -1. + <_> + 12 1 3 6 3. + <_> + + <_> + 0 6 24 3 -1. + <_> + 12 6 12 3 2. + <_> + + <_> + 7 3 12 5 -1. + <_> + 11 3 4 5 3. + <_> + + <_> + 1 13 22 4 -1. + <_> + 1 13 11 2 2. + <_> + 12 15 11 2 2. + <_> + + <_> + 9 12 12 6 -1. + <_> + 9 14 12 2 3. + <_> + + <_> + 0 5 9 6 -1. + <_> + 0 7 9 2 3. + <_> + + <_> + 1 5 23 6 -1. + <_> + 1 7 23 2 3. + <_> + + <_> + 1 6 19 12 -1. + <_> + 1 10 19 4 3. + <_> + + <_> + 9 1 6 21 -1. + <_> + 9 8 6 7 3. + <_> + + <_> + 3 19 18 3 -1. + <_> + 9 19 6 3 3. + <_> + + <_> + 9 14 6 9 -1. + <_> + 11 14 2 9 3. + <_> + + <_> + 9 6 4 12 -1. + <_> + 11 6 2 12 2. + <_> + + <_> + 16 0 6 9 -1. + <_> + 18 0 2 9 3. + <_> + + <_> + 2 0 6 9 -1. + <_> + 4 0 2 9 3. + <_> + + <_> + 13 1 4 22 -1. + <_> + 15 1 2 11 2. + <_> + 13 12 2 11 2. + <_> + + <_> + 1 8 8 12 -1. + <_> + 1 14 8 6 2. + <_> + + <_> + 14 7 7 9 -1. + <_> + 14 10 7 3 3. + <_> + + <_> + 3 12 18 4 -1. + <_> + 3 12 9 2 2. + <_> + 12 14 9 2 2. + <_> + + <_> + 13 1 4 22 -1. + <_> + 15 1 2 11 2. + <_> + 13 12 2 11 2. + <_> + + <_> + 7 1 4 22 -1. + <_> + 7 1 2 11 2. + <_> + 9 12 2 11 2. + <_> + + <_> + 4 7 20 4 -1. + <_> + 14 7 10 2 2. + <_> + 4 9 10 2 2. + <_> + + <_> + 9 10 6 7 -1. + <_> + 12 10 3 7 2. + <_> + + <_> + 7 7 10 4 -1. + <_> + 7 7 5 4 2. + <_> + + <_> + 0 3 4 15 -1. + <_> + 0 8 4 5 3. + <_> + + <_> + 15 0 8 12 -1. + <_> + 19 0 4 6 2. + <_> + 15 6 4 6 2. + <_> + + <_> + 1 0 8 12 -1. + <_> + 1 0 4 6 2. + <_> + 5 6 4 6 2. + <_> + + <_> + 14 5 6 16 -1. + <_> + 16 5 2 16 3. + <_> + + <_> + 4 5 6 16 -1. + <_> + 6 5 2 16 3. + <_> + + <_> + 15 0 6 16 -1. + <_> + 17 0 2 16 3. + <_> + + <_> + 3 0 6 16 -1. + <_> + 5 0 2 16 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 0 3 24 1 3. + <_> + + <_> + 7 1 10 4 -1. + <_> + 7 3 10 2 2. + <_> + + <_> + 1 0 23 8 -1. + <_> + 1 4 23 4 2. + <_> + + <_> + 1 17 19 3 -1. + <_> + 1 18 19 1 3. + <_> + + <_> + 6 18 18 2 -1. + <_> + 6 19 18 1 2. + <_> + + <_> + 1 17 9 6 -1. + <_> + 1 19 9 2 3. + <_> + + <_> + 15 15 6 9 -1. + <_> + 15 18 6 3 3. + <_> + + <_> + 3 15 6 9 -1. + <_> + 3 18 6 3 3. + <_> + + <_> + 4 14 20 6 -1. + <_> + 4 17 20 3 2. + <_> + + <_> + 0 10 6 14 -1. + <_> + 0 10 3 7 2. + <_> + 3 17 3 7 2. + <_> + + <_> + 6 18 18 3 -1. + <_> + 6 19 18 1 3. + <_> + + <_> + 4 12 9 7 -1. + <_> + 7 12 3 7 3. + <_> + + <_> + 6 10 18 5 -1. + <_> + 12 10 6 5 3. + <_> + + <_> + 0 10 18 5 -1. + <_> + 6 10 6 5 3. + <_> + + <_> + 3 2 18 9 -1. + <_> + 9 2 6 9 3. + <_> + + <_> + 4 6 10 10 -1. + <_> + 4 6 5 5 2. + <_> + 9 11 5 5 2. + <_> + + <_> + 20 14 4 9 -1. + <_> + 20 14 2 9 2. + <_> + + <_> + 0 14 4 9 -1. + <_> + 2 14 2 9 2. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 6 21 12 3 -1. + <_> + 12 21 6 3 2. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 1 16 10 8 -1. + <_> + 1 16 5 4 2. + <_> + 6 20 5 4 2. + <_> + + <_> + 11 1 4 20 -1. 
+ <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 1 0 3 19 -1. + <_> + 2 0 1 19 3. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 0 1 6 9 -1. + <_> + 2 1 2 9 3. + <_> + + <_> + 3 7 19 4 -1. + <_> + 3 9 19 2 2. + <_> + + <_> + 7 14 9 6 -1. + <_> + 7 16 9 2 3. + <_> + + <_> + 17 1 7 6 -1. + <_> + 17 4 7 3 2. + <_> + + <_> + 5 0 14 8 -1. + <_> + 5 4 14 4 2. + <_> + + <_> + 16 1 8 6 -1. + <_> + 16 4 8 3 2. + <_> + + <_> + 0 1 8 6 -1. + <_> + 0 4 8 3 2. + <_> + + <_> + 6 0 18 4 -1. + <_> + 15 0 9 2 2. + <_> + 6 2 9 2 2. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 3 7 18 8 -1. + <_> + 9 7 6 8 3. + <_> + + <_> + 2 11 6 9 -1. + <_> + 4 11 2 9 3. + <_> + + <_> + 10 5 6 9 -1. + <_> + 12 5 2 9 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 9 1 4 20 -1. + <_> + 9 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 5 9 18 6 -1. + <_> + 14 9 9 3 2. + <_> + 5 12 9 3 2. + <_> + + <_> + 6 4 6 9 -1. + <_> + 8 4 2 9 3. + <_> + + <_> + 10 16 8 6 -1. + <_> + 10 16 4 6 2. + <_> + + <_> + 0 0 18 8 -1. + <_> + 0 0 9 4 2. + <_> + 9 4 9 4 2. + <_> + + <_> + 6 5 14 12 -1. + <_> + 13 5 7 6 2. + <_> + 6 11 7 6 2. + <_> + + <_> + 4 3 15 7 -1. + <_> + 9 3 5 7 3. + <_> + + <_> + 14 12 10 6 -1. + <_> + 14 14 10 2 3. + <_> + + <_> + 0 11 4 10 -1. + <_> + 0 16 4 5 2. + <_> + + <_> + 1 10 22 3 -1. + <_> + 1 11 22 1 3. + <_> + + <_> + 8 9 6 10 -1. + <_> + 10 9 2 10 3. + <_> + + <_> + 13 2 6 12 -1. + <_> + 16 2 3 6 2. + <_> + 13 8 3 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 7 8 10 16 -1. + <_> + 12 8 5 8 2. + <_> + 7 16 5 8 2. + <_> + + <_> + 8 1 8 12 -1. + <_> + 8 1 4 6 2. + <_> + 12 7 4 6 2. + <_> + + <_> + 7 1 12 14 -1. + <_> + 13 1 6 7 2. + <_> + 7 8 6 7 2. + <_> + + <_> + 2 14 12 6 -1. + <_> + 2 16 12 2 3. + <_> + + <_> + 11 16 6 6 -1. + <_> + 11 19 6 3 2. + <_> + + <_> + 7 16 6 6 -1. + <_> + 7 19 6 3 2. + <_> + + <_> + 13 4 4 10 -1. + <_> + 13 4 2 10 2. + <_> + + <_> + 0 19 19 3 -1. + <_> + 0 20 19 1 3. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 8 1 8 22 -1. + <_> + 8 12 8 11 2. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 6 8 6 8 -1. + <_> + 6 12 6 4 2. + <_> + + <_> + 14 5 6 9 -1. + <_> + 14 8 6 3 3. + <_> + + <_> + 0 6 24 4 -1. + <_> + 0 8 24 2 2. + <_> + + <_> + 14 12 10 6 -1. + <_> + 14 14 10 2 3. + <_> + + <_> + 0 12 10 6 -1. + <_> + 0 14 10 2 3. + <_> + + <_> + 4 6 19 3 -1. + <_> + 4 7 19 1 3. + <_> + + <_> + 1 6 19 3 -1. + <_> + 1 7 19 1 3. + <_> + + <_> + 4 0 16 9 -1. + <_> + 4 3 16 3 3. + <_> + + <_> + 0 1 24 5 -1. + <_> + 8 1 8 5 3. + <_> + + <_> + 3 6 6 15 -1. + <_> + 3 11 6 5 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 6 22 18 2 -1. + <_> + 6 23 18 1 2. + <_> + + <_> + 2 12 6 9 -1. + <_> + 2 15 6 3 3. + <_> + + <_> + 18 12 6 9 -1. + <_> + 18 15 6 3 3. + <_> + + <_> + 0 12 6 9 -1. + <_> + 0 15 6 3 3. + <_> + + <_> + 11 14 4 10 -1. + <_> + 11 19 4 5 2. + <_> + + <_> + 9 6 6 16 -1. + <_> + 9 14 6 8 2. + <_> + + <_> + 7 7 10 10 -1. + <_> + 7 12 10 5 2. + <_> + + <_> + 1 3 6 13 -1. + <_> + 3 3 2 13 3. + <_> + + <_> + 18 1 6 13 -1. + <_> + 18 1 3 13 2. + <_> + + <_> + 5 1 6 9 -1. + <_> + 7 1 2 9 3. + <_> + + <_> + 18 2 6 11 -1. + <_> + 18 2 3 11 2. + <_> + + <_> + 0 2 6 11 -1. + <_> + 3 2 3 11 2. + <_> + + <_> + 9 12 15 6 -1. + <_> + 9 14 15 2 3. 
+ <_> + + <_> + 2 2 20 3 -1. + <_> + 2 3 20 1 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 10 6 2 9 2. + <_> + + <_> + 5 6 12 14 -1. + <_> + 5 6 6 7 2. + <_> + 11 13 6 7 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 7 0 9 6 -1. + <_> + 10 0 3 6 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 4 1 12 20 -1. + <_> + 4 1 6 10 2. + <_> + 10 11 6 10 2. + <_> + + <_> + 6 7 18 3 -1. + <_> + 6 7 9 3 2. + <_> + + <_> + 0 7 18 3 -1. + <_> + 9 7 9 3 2. + <_> + + <_> + 3 20 18 3 -1. + <_> + 9 20 6 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 6 2 12 15 -1. + <_> + 10 2 4 15 3. + <_> + + <_> + 2 3 18 3 -1. + <_> + 2 4 18 1 3. + <_> + + <_> + 19 4 4 18 -1. + <_> + 21 4 2 9 2. + <_> + 19 13 2 9 2. + <_> + + <_> + 0 1 19 3 -1. + <_> + 0 2 19 1 3. + <_> + + <_> + 5 0 15 4 -1. + <_> + 5 2 15 2 2. + <_> + + <_> + 5 2 14 5 -1. + <_> + 12 2 7 5 2. + <_> + + <_> + 1 2 22 14 -1. + <_> + 1 2 11 14 2. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 9 6 3 18 -1. + <_> + 9 12 3 6 3. + <_> + + <_> + 2 0 20 3 -1. + <_> + 2 1 20 1 3. + <_> + + <_> + 5 4 5 12 -1. + <_> + 5 8 5 4 3. + <_> + + <_> + 8 6 12 5 -1. + <_> + 12 6 4 5 3. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 12 3 6 2. + <_> + 12 18 3 6 2. + <_> + + <_> + 14 14 8 10 -1. + <_> + 18 14 4 5 2. + <_> + 14 19 4 5 2. + <_> + + <_> + 2 14 8 10 -1. + <_> + 2 14 4 5 2. + <_> + 6 19 4 5 2. + <_> + + <_> + 10 18 12 6 -1. + <_> + 16 18 6 3 2. + <_> + 10 21 6 3 2. + <_> + + <_> + 1 3 6 9 -1. + <_> + 1 6 6 3 3. + <_> + + <_> + 11 3 3 20 -1. + <_> + 12 3 1 20 3. + <_> + + <_> + 4 6 14 6 -1. + <_> + 4 6 7 3 2. + <_> + 11 9 7 3 2. + <_> + + <_> + 6 5 12 13 -1. + <_> + 10 5 4 13 3. + <_> + + <_> + 5 4 4 15 -1. + <_> + 5 9 4 5 3. + <_> + + <_> + 9 16 15 4 -1. + <_> + 14 16 5 4 3. + <_> + + <_> + 7 8 6 14 -1. + <_> + 7 8 3 7 2. + <_> + 10 15 3 7 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 2 5 18 3 -1. + <_> + 2 6 18 1 3. + <_> + + <_> + 5 1 15 8 -1. + <_> + 5 5 15 4 2. + <_> + + <_> + 7 1 8 18 -1. + <_> + 7 10 8 9 2. + <_> + + <_> + 0 10 24 3 -1. + <_> + 0 11 24 1 3. + <_> + + <_> + 0 2 6 13 -1. + <_> + 2 2 2 13 3. + <_> + + <_> + 16 0 8 10 -1. + <_> + 20 0 4 5 2. + <_> + 16 5 4 5 2. + <_> + + <_> + 5 1 10 9 -1. + <_> + 5 4 10 3 3. + <_> + + <_> + 5 6 18 3 -1. + <_> + 5 7 18 1 3. + <_> + + <_> + 0 1 24 3 -1. + <_> + 0 2 24 1 3. + <_> + + <_> + 11 4 6 11 -1. + <_> + 13 4 2 11 3. + <_> + + <_> + 0 0 8 10 -1. + <_> + 0 0 4 5 2. + <_> + 4 5 4 5 2. + <_> + + <_> + 4 16 18 3 -1. + <_> + 4 17 18 1 3. + <_> + + <_> + 2 16 18 3 -1. + <_> + 2 17 18 1 3. + <_> + + <_> + 3 0 18 10 -1. + <_> + 12 0 9 5 2. + <_> + 3 5 9 5 2. + <_> + + <_> + 2 3 20 21 -1. + <_> + 12 3 10 21 2. + <_> + + <_> + 6 7 14 3 -1. + <_> + 6 7 7 3 2. + <_> + + <_> + 0 9 12 6 -1. + <_> + 0 9 6 3 2. + <_> + 6 12 6 3 2. + <_> + + <_> + 3 14 21 4 -1. + <_> + 10 14 7 4 3. + <_> + + <_> + 0 14 21 4 -1. + <_> + 7 14 7 4 3. + <_> + + <_> + 5 21 18 3 -1. + <_> + 11 21 6 3 3. + <_> + + <_> + 1 21 18 3 -1. + <_> + 7 21 6 3 3. + <_> + + <_> + 19 4 4 18 -1. + <_> + 21 4 2 9 2. + <_> + 19 13 2 9 2. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 19 4 4 18 -1. + <_> + 21 4 2 9 2. + <_> + 19 13 2 9 2. + <_> + + <_> + 7 15 10 6 -1. + <_> + 7 17 10 2 3. + <_> + + <_> + 9 13 11 9 -1. + <_> + 9 16 11 3 3. + <_> + + <_> + 0 6 4 10 -1. + <_> + 0 11 4 5 2. + <_> + + <_> + 15 16 9 6 -1. + <_> + 15 18 9 2 3. + <_> + + <_> + 1 5 4 18 -1. + <_> + 1 5 2 9 2. + <_> + 3 14 2 9 2. 
+ <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 7 8 8 10 -1. + <_> + 7 8 4 5 2. + <_> + 11 13 4 5 2. + <_> + + <_> + 9 8 12 5 -1. + <_> + 13 8 4 5 3. + <_> + + <_> + 7 8 9 7 -1. + <_> + 10 8 3 7 3. + <_> + + <_> + 9 8 12 5 -1. + <_> + 13 8 4 5 3. + <_> + + <_> + 7 6 9 7 -1. + <_> + 10 6 3 7 3. + <_> + + <_> + 9 8 12 5 -1. + <_> + 13 8 4 5 3. + <_> + + <_> + 10 5 4 18 -1. + <_> + 10 11 4 6 3. + <_> + + <_> + 5 5 14 12 -1. + <_> + 5 11 14 6 2. + <_> + + <_> + 0 1 11 4 -1. + <_> + 0 3 11 2 2. + <_> + + <_> + 9 10 6 10 -1. + <_> + 11 10 2 10 3. + <_> + + <_> + 2 17 11 6 -1. + <_> + 2 19 11 2 3. + <_> + + <_> + 15 16 9 6 -1. + <_> + 15 18 9 2 3. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 6 4 12 13 -1. + <_> + 10 4 4 13 3. + <_> + + <_> + 0 18 18 3 -1. + <_> + 0 19 18 1 3. + <_> + + <_> + 6 18 18 3 -1. + <_> + 6 19 18 1 3. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 13 15 9 6 -1. + <_> + 13 17 9 2 3. + <_> + + <_> + 2 15 9 6 -1. + <_> + 2 17 9 2 3. + <_> + + <_> + 13 1 6 16 -1. + <_> + 13 1 3 16 2. + <_> + + <_> + 5 1 6 16 -1. + <_> + 8 1 3 16 2. + <_> + + <_> + 11 5 6 10 -1. + <_> + 13 5 2 10 3. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 10 0 6 24 -1. + <_> + 12 0 2 24 3. + <_> + + <_> + 3 4 4 20 -1. + <_> + 3 4 2 10 2. + <_> + 5 14 2 10 2. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 4 0 6 9 -1. + <_> + 6 0 2 9 3. + <_> + + <_> + 4 5 18 5 -1. + <_> + 10 5 6 5 3. + <_> + + <_> + 5 6 6 9 -1. + <_> + 7 6 2 9 3. + <_> + + <_> + 7 2 15 8 -1. + <_> + 12 2 5 8 3. + <_> + + <_> + 2 2 15 8 -1. + <_> + 7 2 5 8 3. + <_> + + <_> + 10 0 4 9 -1. + <_> + 10 0 2 9 2. + <_> + + <_> + 3 4 6 12 -1. + <_> + 3 4 3 6 2. + <_> + 6 10 3 6 2. + <_> + + <_> + 16 0 8 18 -1. + <_> + 16 0 4 18 2. + <_> + + <_> + 0 0 8 18 -1. + <_> + 4 0 4 18 2. + <_> + + <_> + 0 7 24 6 -1. + <_> + 0 9 24 2 3. + <_> + + <_> + 4 7 14 3 -1. + <_> + 11 7 7 3 2. + <_> + + <_> + 10 8 8 15 -1. + <_> + 10 8 4 15 2. + <_> + + <_> + 7 0 10 14 -1. + <_> + 12 0 5 14 2. + <_> + + <_> + 13 10 8 10 -1. + <_> + 17 10 4 5 2. + <_> + 13 15 4 5 2. + <_> + + <_> + 3 0 4 9 -1. + <_> + 5 0 2 9 2. + <_> + + <_> + 16 1 6 8 -1. + <_> + 16 1 3 8 2. + <_> + + <_> + 2 1 6 8 -1. + <_> + 5 1 3 8 2. + <_> + + <_> + 3 6 18 12 -1. + <_> + 3 10 18 4 3. + <_> + + <_> + 4 12 16 4 -1. + <_> + 4 14 16 2 2. + <_> + + <_> + 4 9 16 15 -1. + <_> + 4 14 16 5 3. + <_> + + <_> + 3 10 8 10 -1. + <_> + 3 10 4 5 2. + <_> + 7 15 4 5 2. + <_> + + <_> + 8 18 16 6 -1. + <_> + 16 18 8 3 2. + <_> + 8 21 8 3 2. + <_> + + <_> + 2 16 12 5 -1. + <_> + 6 16 4 5 3. + <_> + + <_> + 14 14 9 4 -1. + <_> + 14 16 9 2 2. + <_> + + <_> + 7 14 9 6 -1. + <_> + 7 16 9 2 3. + <_> + + <_> + 4 10 16 12 -1. + <_> + 4 14 16 4 3. + <_> + + <_> + 0 13 19 6 -1. + <_> + 0 15 19 2 3. + <_> + + <_> + 10 13 9 6 -1. + <_> + 10 15 9 2 3. + <_> + + <_> + 5 0 3 23 -1. + <_> + 6 0 1 23 3. + <_> + + <_> + 0 8 24 6 -1. + <_> + 0 10 24 2 3. + <_> + + <_> + 0 5 5 12 -1. + <_> + 0 9 5 4 3. + <_> + + <_> + 3 0 19 18 -1. + <_> + 3 9 19 9 2. + <_> + + <_> + 9 11 6 12 -1. + <_> + 9 11 3 6 2. + <_> + 12 17 3 6 2. + <_> + + <_> + 0 5 24 8 -1. + <_> + 12 5 12 4 2. + <_> + 0 9 12 4 2. + <_> + + <_> + 6 18 9 4 -1. + <_> + 6 20 9 2 2. + <_> + + <_> + 8 8 10 6 -1. + <_> + 8 10 10 2 3. + <_> + + <_> + 2 7 20 3 -1. + <_> + 2 8 20 1 3. + <_> + + <_> + 12 0 7 20 -1. + <_> + 12 10 7 10 2. + <_> + + <_> + 5 0 7 20 -1. + <_> + 5 10 7 10 2. + <_> + + <_> + 14 2 2 18 -1. + <_> + 14 11 2 9 2. + <_> + + <_> + 5 8 10 12 -1. 
+ <_> + 10 8 5 12 2. + <_> + + <_> + 6 9 12 8 -1. + <_> + 12 9 6 4 2. + <_> + 6 13 6 4 2. + <_> + + <_> + 7 7 3 14 -1. + <_> + 7 14 3 7 2. + <_> + + <_> + 11 2 12 16 -1. + <_> + 17 2 6 8 2. + <_> + 11 10 6 8 2. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 13 14 9 4 -1. + <_> + 13 16 9 2 2. + <_> + + <_> + 0 12 22 4 -1. + <_> + 0 12 11 2 2. + <_> + 11 14 11 2 2. + <_> + + <_> + 1 12 22 6 -1. + <_> + 12 12 11 3 2. + <_> + 1 15 11 3 2. + <_> + + <_> + 6 6 9 6 -1. + <_> + 9 6 3 6 3. + <_> + + <_> + 10 0 4 9 -1. + <_> + 10 0 2 9 2. + <_> + + <_> + 3 8 18 7 -1. + <_> + 9 8 6 7 3. + <_> + + <_> + 0 6 24 6 -1. + <_> + 0 8 24 2 3. + <_> + + <_> + 0 11 24 10 -1. + <_> + 8 11 8 10 3. + <_> + + <_> + 3 3 18 21 -1. + <_> + 9 3 6 21 3. + <_> + + <_> + 7 12 4 10 -1. + <_> + 9 12 2 10 2. + <_> + + <_> + 10 16 10 8 -1. + <_> + 15 16 5 4 2. + <_> + 10 20 5 4 2. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 12 10 6 12 -1. + <_> + 15 10 3 6 2. + <_> + 12 16 3 6 2. + <_> + + <_> + 6 10 6 12 -1. + <_> + 6 10 3 6 2. + <_> + 9 16 3 6 2. + <_> + + <_> + 16 12 6 12 -1. + <_> + 19 12 3 6 2. + <_> + 16 18 3 6 2. + <_> + + <_> + 2 12 6 12 -1. + <_> + 2 12 3 6 2. + <_> + 5 18 3 6 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 14 20 10 4 -1. + <_> + 14 20 5 4 2. + <_> + + <_> + 0 20 10 4 -1. + <_> + 5 20 5 4 2. + <_> + + <_> + 11 17 9 6 -1. + <_> + 11 19 9 2 3. + <_> + + <_> + 3 2 14 4 -1. + <_> + 3 4 14 2 2. + <_> + + <_> + 10 1 10 4 -1. + <_> + 10 3 10 2 2. + <_> + + <_> + 0 15 10 4 -1. + <_> + 5 15 5 4 2. + <_> + + <_> + 19 2 3 19 -1. + <_> + 20 2 1 19 3. + <_> + + <_> + 4 12 9 8 -1. + <_> + 7 12 3 8 3. + <_> + + <_> + 4 7 5 12 -1. + <_> + 4 11 5 4 3. + <_> + + <_> + 0 1 24 3 -1. + <_> + 8 1 8 3 3. + <_> + + <_> + 6 8 12 4 -1. + <_> + 6 10 12 2 2. + <_> + + <_> + 19 3 4 10 -1. + <_> + 19 3 2 10 2. + <_> + + <_> + 0 6 9 6 -1. + <_> + 3 6 3 6 3. + <_> + + <_> + 18 0 6 22 -1. + <_> + 20 0 2 22 3. + <_> + + <_> + 0 0 6 22 -1. + <_> + 2 0 2 22 3. + <_> + + <_> + 5 15 19 3 -1. + <_> + 5 16 19 1 3. + <_> + + <_> + 10 7 4 15 -1. + <_> + 10 12 4 5 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 21 18 3 -1. + <_> + 0 22 18 1 3. + <_> + + <_> + 7 3 10 15 -1. + <_> + 7 8 10 5 3. + <_> + + <_> + 1 7 18 3 -1. + <_> + 1 8 18 1 3. + <_> + + <_> + 8 2 9 6 -1. + <_> + 11 2 3 6 3. + <_> + + <_> + 0 10 24 14 -1. + <_> + 0 17 24 7 2. + <_> + + <_> + 13 9 8 10 -1. + <_> + 17 9 4 5 2. + <_> + 13 14 4 5 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 12 5 2 9 2. + <_> + + <_> + 13 9 8 10 -1. + <_> + 17 9 4 5 2. + <_> + 13 14 4 5 2. + <_> + + <_> + 7 11 10 10 -1. + <_> + 7 11 5 5 2. + <_> + 12 16 5 5 2. + <_> + + <_> + 4 13 18 4 -1. + <_> + 13 13 9 2 2. + <_> + 4 15 9 2 2. + <_> + + <_> + 0 0 19 2 -1. + <_> + 0 1 19 1 2. + <_> + + <_> + 0 18 24 6 -1. + <_> + 8 18 8 6 3. + <_> + + <_> + 6 4 8 16 -1. + <_> + 6 12 8 8 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 10 10 2 2. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 13 15 7 9 -1. + <_> + 13 18 7 3 3. + <_> + + <_> + 3 18 12 6 -1. + <_> + 3 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 12 14 6 9 -1. + <_> + 12 17 6 3 3. + <_> + + <_> + 2 15 15 8 -1. + <_> + 2 19 15 4 2. + <_> + + <_> + 9 6 6 16 -1. + <_> + 9 14 6 8 2. + <_> + + <_> + 6 6 7 12 -1. + <_> + 6 10 7 4 3. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 5 14 6 9 -1. + <_> + 5 17 6 3 3. + <_> + + <_> + 10 8 6 9 -1. + <_> + 12 8 2 9 3. + <_> + + <_> + 6 6 4 18 -1. + <_> + 6 6 2 9 2. 
+ <_> + 8 15 2 9 2. + <_> + + <_> + 14 9 6 12 -1. + <_> + 17 9 3 6 2. + <_> + 14 15 3 6 2. + <_> + + <_> + 4 9 6 12 -1. + <_> + 4 9 3 6 2. + <_> + 7 15 3 6 2. + <_> + + <_> + 14 15 9 6 -1. + <_> + 14 17 9 2 3. + <_> + + <_> + 0 20 18 4 -1. + <_> + 0 20 9 2 2. + <_> + 9 22 9 2 2. + <_> + + <_> + 13 18 9 6 -1. + <_> + 13 20 9 2 3. + <_> + + <_> + 2 18 9 6 -1. + <_> + 2 20 9 2 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 19 2 4 22 -1. + <_> + 21 2 2 11 2. + <_> + 19 13 2 11 2. + <_> + + <_> + 1 2 4 22 -1. + <_> + 1 2 2 11 2. + <_> + 3 13 2 11 2. + <_> + + <_> + 15 0 2 24 -1. + <_> + 15 0 1 24 2. + <_> + + <_> + 3 20 16 4 -1. + <_> + 11 20 8 4 2. + <_> + + <_> + 11 6 4 18 -1. + <_> + 13 6 2 9 2. + <_> + 11 15 2 9 2. + <_> + + <_> + 7 9 10 14 -1. + <_> + 7 9 5 7 2. + <_> + 12 16 5 7 2. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 3 6 7 9 -1. + <_> + 3 9 7 3 3. + <_> + + <_> + 20 4 4 20 -1. + <_> + 22 4 2 10 2. + <_> + 20 14 2 10 2. + <_> + + <_> + 7 6 6 9 -1. + <_> + 7 9 6 3 3. + <_> + + <_> + 7 0 10 14 -1. + <_> + 12 0 5 7 2. + <_> + 7 7 5 7 2. + <_> + + <_> + 2 1 18 6 -1. + <_> + 11 1 9 6 2. + <_> + + <_> + 15 0 2 24 -1. + <_> + 15 0 1 24 2. + <_> + + <_> + 7 0 2 24 -1. + <_> + 8 0 1 24 2. + <_> + + <_> + 13 12 6 7 -1. + <_> + 13 12 3 7 2. + <_> + + <_> + 5 12 6 7 -1. + <_> + 8 12 3 7 2. + <_> + + <_> + 3 5 18 19 -1. + <_> + 9 5 6 19 3. + <_> + + <_> + 5 6 9 6 -1. + <_> + 8 6 3 6 3. + <_> + + <_> + 9 5 9 6 -1. + <_> + 12 5 3 6 3. + <_> + + <_> + 3 16 10 8 -1. + <_> + 3 16 5 4 2. + <_> + 8 20 5 4 2. + <_> + + <_> + 19 8 5 15 -1. + <_> + 19 13 5 5 3. + <_> + + <_> + 0 8 5 15 -1. + <_> + 0 13 5 5 3. + <_> + + <_> + 20 4 4 20 -1. + <_> + 22 4 2 10 2. + <_> + 20 14 2 10 2. + <_> + + <_> + 0 4 4 20 -1. + <_> + 0 4 2 10 2. + <_> + 2 14 2 10 2. + <_> + + <_> + 7 7 10 4 -1. + <_> + 7 7 5 4 2. + <_> + + <_> + 4 19 14 4 -1. + <_> + 11 19 7 4 2. + <_> + + <_> + 10 11 12 3 -1. + <_> + 10 11 6 3 2. + <_> + + <_> + 0 1 24 3 -1. + <_> + 0 2 24 1 3. + <_> + + <_> + 7 2 14 20 -1. + <_> + 14 2 7 10 2. + <_> + 7 12 7 10 2. + <_> + + <_> + 0 13 6 9 -1. + <_> + 2 13 2 9 3. + <_> + + <_> + 13 0 4 19 -1. + <_> + 13 0 2 19 2. + <_> + + <_> + 1 11 14 3 -1. + <_> + 8 11 7 3 2. + <_> + + <_> + 7 1 16 20 -1. + <_> + 15 1 8 10 2. + <_> + 7 11 8 10 2. + <_> + + <_> + 0 10 21 9 -1. + <_> + 7 10 7 9 3. + <_> + + <_> + 6 19 15 5 -1. + <_> + 11 19 5 5 3. + <_> + + <_> + 8 10 6 6 -1. + <_> + 11 10 3 6 2. + <_> + + <_> + 7 1 16 20 -1. + <_> + 15 1 8 10 2. + <_> + 7 11 8 10 2. + <_> + + <_> + 1 1 16 20 -1. + <_> + 1 1 8 10 2. + <_> + 9 11 8 10 2. + <_> + + <_> + 16 4 3 12 -1. + <_> + 16 10 3 6 2. + <_> + + <_> + 5 4 3 12 -1. + <_> + 5 10 3 6 2. + <_> + + <_> + 7 6 10 8 -1. + <_> + 12 6 5 4 2. + <_> + 7 10 5 4 2. + <_> + + <_> + 4 9 6 6 -1. + <_> + 4 12 6 3 2. + <_> + + <_> + 6 5 12 4 -1. + <_> + 6 7 12 2 2. + <_> + + <_> + 9 2 5 15 -1. + <_> + 9 7 5 5 3. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 6 0 11 10 -1. + <_> + 6 5 11 5 2. + <_> + + <_> + 12 7 4 12 -1. + <_> + 12 13 4 6 2. + <_> + + <_> + 7 2 9 4 -1. + <_> + 7 4 9 2 2. + <_> + + <_> + 6 0 13 6 -1. + <_> + 6 2 13 2 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 10 8 6 9 -1. + <_> + 12 8 2 9 3. + <_> + + <_> + 3 18 10 6 -1. + <_> + 3 20 10 2 3. + <_> + + <_> + 4 14 20 3 -1. + <_> + 4 15 20 1 3. + <_> + + <_> + 2 15 9 6 -1. + <_> + 2 17 9 2 3. + <_> + + <_> + 13 0 4 19 -1. + <_> + 13 0 2 19 2. 
+ <_> + + <_> + 7 0 4 19 -1. + <_> + 9 0 2 19 2. + <_> + + <_> + 1 4 22 2 -1. + <_> + 1 5 22 1 2. + <_> + + <_> + 0 0 9 6 -1. + <_> + 0 2 9 2 3. + <_> + + <_> + 0 0 24 18 -1. + <_> + 0 9 24 9 2. + <_> + + <_> + 3 2 16 8 -1. + <_> + 3 6 16 4 2. + <_> + + <_> + 3 6 18 6 -1. + <_> + 3 8 18 2 3. + <_> + + <_> + 3 1 6 10 -1. + <_> + 5 1 2 10 3. + <_> + + <_> + 13 0 9 6 -1. + <_> + 16 0 3 6 3. + <_> + + <_> + 2 0 9 6 -1. + <_> + 5 0 3 6 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 6 0 7 10 -1. + <_> + 6 5 7 5 2. + <_> + + <_> + 2 2 20 4 -1. + <_> + 12 2 10 2 2. + <_> + 2 4 10 2 2. + <_> + + <_> + 2 11 19 3 -1. + <_> + 2 12 19 1 3. + <_> + + <_> + 10 8 6 9 -1. + <_> + 12 8 2 9 3. + <_> + + <_> + 8 8 6 9 -1. + <_> + 10 8 2 9 3. + <_> + + <_> + 13 8 4 9 -1. + <_> + 13 8 2 9 2. + <_> + + <_> + 3 11 9 9 -1. + <_> + 6 11 3 9 3. + <_> + + <_> + 3 9 18 5 -1. + <_> + 9 9 6 5 3. + <_> + + <_> + 2 4 2 20 -1. + <_> + 2 14 2 10 2. + <_> + + <_> + 14 17 8 6 -1. + <_> + 14 20 8 3 2. + <_> + + <_> + 3 21 18 2 -1. + <_> + 3 22 18 1 2. + <_> + + <_> + 5 4 15 6 -1. + <_> + 10 4 5 6 3. + <_> + + <_> + 2 15 12 6 -1. + <_> + 2 17 12 2 3. + <_> + + <_> + 17 8 6 9 -1. + <_> + 17 11 6 3 3. + <_> + + <_> + 2 12 20 4 -1. + <_> + 2 12 10 2 2. + <_> + 12 14 10 2 2. + <_> + + <_> + 0 17 24 6 -1. + <_> + 0 19 24 2 3. + <_> + + <_> + 7 16 9 4 -1. + <_> + 7 18 9 2 2. + <_> + + <_> + 15 1 4 22 -1. + <_> + 17 1 2 11 2. + <_> + 15 12 2 11 2. + <_> + + <_> + 5 1 4 22 -1. + <_> + 5 1 2 11 2. + <_> + 7 12 2 11 2. + <_> + + <_> + 11 13 8 9 -1. + <_> + 11 16 8 3 3. + <_> + + <_> + 6 1 6 9 -1. + <_> + 8 1 2 9 3. + <_> + + <_> + 11 4 3 18 -1. + <_> + 11 10 3 6 3. + <_> + + <_> + 5 8 12 6 -1. + <_> + 5 8 6 3 2. + <_> + 11 11 6 3 2. + <_> + + <_> + 15 7 5 8 -1. + <_> + 15 11 5 4 2. + <_> + + <_> + 4 7 5 8 -1. + <_> + 4 11 5 4 2. + <_> + + <_> + 12 6 6 12 -1. + <_> + 15 6 3 6 2. + <_> + 12 12 3 6 2. + <_> + + <_> + 6 6 6 12 -1. + <_> + 6 6 3 6 2. + <_> + 9 12 3 6 2. + <_> + + <_> + 5 9 14 8 -1. + <_> + 12 9 7 4 2. + <_> + 5 13 7 4 2. + <_> + + <_> + 9 1 3 14 -1. + <_> + 9 8 3 7 2. + <_> + + <_> + 12 6 6 12 -1. + <_> + 12 10 6 4 3. + <_> + + <_> + 4 5 4 18 -1. + <_> + 4 5 2 9 2. + <_> + 6 14 2 9 2. + <_> + + <_> + 4 6 16 18 -1. + <_> + 4 12 16 6 3. + <_> + + <_> + 5 4 7 20 -1. + <_> + 5 14 7 10 2. + <_> + + <_> + 14 8 8 12 -1. + <_> + 14 14 8 6 2. + <_> + + <_> + 9 10 6 14 -1. + <_> + 9 10 3 7 2. + <_> + 12 17 3 7 2. + <_> + + <_> + 9 5 9 6 -1. + <_> + 12 5 3 6 3. + <_> + + <_> + 9 4 3 18 -1. + <_> + 10 4 1 18 3. + <_> + + <_> + 1 4 22 14 -1. + <_> + 12 4 11 7 2. + <_> + 1 11 11 7 2. + <_> + + <_> + 2 7 18 2 -1. + <_> + 2 8 18 1 2. + <_> + + <_> + 12 6 6 12 -1. + <_> + 12 10 6 4 3. + <_> + + <_> + 6 5 9 7 -1. + <_> + 9 5 3 7 3. + <_> + + <_> + 12 7 4 12 -1. + <_> + 12 13 4 6 2. + <_> + + <_> + 8 7 4 12 -1. + <_> + 8 13 4 6 2. + <_> + + <_> + 7 2 10 22 -1. + <_> + 7 13 10 11 2. + <_> + + <_> + 0 1 3 20 -1. + <_> + 1 1 1 20 3. + <_> + + <_> + 4 13 18 4 -1. + <_> + 13 13 9 2 2. + <_> + 4 15 9 2 2. + <_> + + <_> + 2 13 18 4 -1. + <_> + 2 13 9 2 2. + <_> + 11 15 9 2 2. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 6 0 18 24 -1. + <_> + 15 0 9 12 2. + <_> + 6 12 9 12 2. + <_> + + <_> + 6 6 6 12 -1. + <_> + 6 10 6 4 3. + <_> + + <_> + 8 7 10 4 -1. + <_> + 8 9 10 2 2. + <_> + + <_> + 1 9 18 6 -1. + <_> + 1 9 9 3 2. + <_> + 10 12 9 3 2. + <_> + + <_> + 6 6 18 3 -1. + <_> + 6 7 18 1 3. + <_> + + <_> + 7 7 9 8 -1. + <_> + 10 7 3 8 3. 
+ <_> + + <_> + 10 12 6 12 -1. + <_> + 12 12 2 12 3. + <_> + + <_> + 3 14 18 3 -1. + <_> + 3 15 18 1 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 1 12 10 6 -1. + <_> + 1 14 10 2 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 10 3 3 19 -1. + <_> + 11 3 1 19 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 6 1 11 9 -1. + <_> + 6 4 11 3 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 6 5 11 6 -1. + <_> + 6 8 11 3 2. + <_> + + <_> + 16 7 8 5 -1. + <_> + 16 7 4 5 2. + <_> + + <_> + 2 4 20 19 -1. + <_> + 12 4 10 19 2. + <_> + + <_> + 2 1 21 6 -1. + <_> + 9 1 7 6 3. + <_> + + <_> + 6 5 12 14 -1. + <_> + 6 5 6 7 2. + <_> + 12 12 6 7 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 2 11 8 5 -1. + <_> + 6 11 4 5 2. + <_> + + <_> + 16 7 8 5 -1. + <_> + 16 7 4 5 2. + <_> + + <_> + 0 7 8 5 -1. + <_> + 4 7 4 5 2. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 8 6 8 10 -1. + <_> + 8 6 4 5 2. + <_> + 12 11 4 5 2. + <_> + + <_> + 15 15 9 9 -1. + <_> + 18 15 3 9 3. + <_> + + <_> + 0 15 9 9 -1. + <_> + 3 15 3 9 3. + <_> + + <_> + 12 10 9 7 -1. + <_> + 15 10 3 7 3. + <_> + + <_> + 3 10 9 7 -1. + <_> + 6 10 3 7 3. + <_> + + <_> + 13 15 10 8 -1. + <_> + 18 15 5 4 2. + <_> + 13 19 5 4 2. + <_> + + <_> + 0 1 6 12 -1. + <_> + 0 1 3 6 2. + <_> + 3 7 3 6 2. + <_> + + <_> + 10 0 6 12 -1. + <_> + 13 0 3 6 2. + <_> + 10 6 3 6 2. + <_> + + <_> + 7 0 10 12 -1. + <_> + 7 0 5 6 2. + <_> + 12 6 5 6 2. + <_> + + <_> + 4 1 16 8 -1. + <_> + 4 1 8 8 2. + <_> + + <_> + 0 21 19 3 -1. + <_> + 0 22 19 1 3. + <_> + + <_> + 6 9 18 4 -1. + <_> + 15 9 9 2 2. + <_> + 6 11 9 2 2. + <_> + + <_> + 3 4 9 6 -1. + <_> + 3 6 9 2 3. + <_> + + <_> + 9 1 6 15 -1. + <_> + 9 6 6 5 3. + <_> + + <_> + 5 9 6 6 -1. + <_> + 8 9 3 6 2. + <_> + + <_> + 5 1 14 9 -1. + <_> + 5 4 14 3 3. + <_> + + <_> + 3 0 8 20 -1. + <_> + 3 0 4 10 2. + <_> + 7 10 4 10 2. + <_> + + <_> + 5 0 7 9 -1. + <_> + 5 3 7 3 3. + <_> + + <_> + 6 6 12 5 -1. + <_> + 10 6 4 5 3. + <_> + + <_> + 0 1 8 14 -1. + <_> + 4 1 4 14 2. + <_> + + <_> + 2 12 22 4 -1. + <_> + 2 14 22 2 2. + <_> + + <_> + 8 17 6 6 -1. + <_> + 8 20 6 3 2. + <_> + + <_> + 18 1 6 7 -1. + <_> + 18 1 3 7 2. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 4 6 17 18 -1. + <_> + 4 12 17 6 3. + <_> + + <_> + 6 0 12 6 -1. + <_> + 6 0 6 3 2. + <_> + 12 3 6 3 2. + <_> + + <_> + 4 7 18 4 -1. + <_> + 13 7 9 2 2. + <_> + 4 9 9 2 2. + <_> + + <_> + 4 12 10 6 -1. + <_> + 4 14 10 2 3. + <_> + + <_> + 7 9 10 12 -1. + <_> + 12 9 5 6 2. + <_> + 7 15 5 6 2. + <_> + + <_> + 0 1 24 3 -1. + <_> + 8 1 8 3 3. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 6 -1. + <_> + 8 11 3 6 2. + <_> + + <_> + 3 10 19 3 -1. + <_> + 3 11 19 1 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 14 16 10 6 -1. + <_> + 14 18 10 2 3. + <_> + + <_> + 0 16 10 6 -1. + <_> + 0 18 10 2 3. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 18 9 6 -1. + <_> + 0 20 9 2 3. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 6 2 6 9 -1. + <_> + 8 2 2 9 3. + <_> + + <_> + 15 8 4 12 -1. + <_> + 15 8 2 12 2. + <_> + + <_> + 8 13 8 8 -1. + <_> + 8 17 8 4 2. + <_> + + <_> + 4 20 18 3 -1. + <_> + 10 20 6 3 3. + <_> + + <_> + 5 8 4 12 -1. + <_> + 7 8 2 12 2. + <_> + + <_> + 7 7 12 3 -1. + <_> + 7 7 6 3 2. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. 
+ <_> + + <_> + 5 20 18 3 -1. + <_> + 11 20 6 3 3. + <_> + + <_> + 1 20 18 3 -1. + <_> + 7 20 6 3 3. + <_> + + <_> + 18 1 6 20 -1. + <_> + 21 1 3 10 2. + <_> + 18 11 3 10 2. + <_> + + <_> + 0 1 6 20 -1. + <_> + 0 1 3 10 2. + <_> + 3 11 3 10 2. + <_> + + <_> + 13 3 4 18 -1. + <_> + 15 3 2 9 2. + <_> + 13 12 2 9 2. + <_> + + <_> + 0 2 6 12 -1. + <_> + 0 6 6 4 3. + <_> + + <_> + 12 9 12 6 -1. + <_> + 18 9 6 3 2. + <_> + 12 12 6 3 2. + <_> + + <_> + 7 3 4 18 -1. + <_> + 7 3 2 9 2. + <_> + 9 12 2 9 2. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 0 9 12 6 -1. + <_> + 0 9 6 3 2. + <_> + 6 12 6 3 2. + <_> + + <_> + 14 4 8 20 -1. + <_> + 18 4 4 10 2. + <_> + 14 14 4 10 2. + <_> + + <_> + 2 4 8 20 -1. + <_> + 2 4 4 10 2. + <_> + 6 14 4 10 2. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 1 13 9 6 -1. + <_> + 1 15 9 2 3. + <_> + + <_> + 3 15 18 3 -1. + <_> + 9 15 6 3 3. + <_> + + <_> + 5 13 9 6 -1. + <_> + 5 15 9 2 3. + <_> + + <_> + 5 0 18 3 -1. + <_> + 5 1 18 1 3. + <_> + + <_> + 8 2 6 7 -1. + <_> + 11 2 3 7 2. + <_> + + <_> + 9 1 9 6 -1. + <_> + 12 1 3 6 3. + <_> + + <_> + 6 1 9 6 -1. + <_> + 9 1 3 6 3. + <_> + + <_> + 5 6 14 6 -1. + <_> + 12 6 7 3 2. + <_> + 5 9 7 3 2. + <_> + + <_> + 8 2 6 13 -1. + <_> + 10 2 2 13 3. + <_> + + <_> + 6 11 12 6 -1. + <_> + 12 11 6 3 2. + <_> + 6 14 6 3 2. + <_> + + <_> + 3 1 18 15 -1. + <_> + 9 1 6 15 3. + <_> + + <_> + 13 0 6 7 -1. + <_> + 13 0 3 7 2. + <_> + + <_> + 3 3 16 6 -1. + <_> + 3 6 16 3 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 7 7 6 9 -1. + <_> + 9 7 2 9 3. + <_> + + <_> + 13 0 4 24 -1. + <_> + 13 0 2 24 2. + <_> + + <_> + 7 0 4 24 -1. + <_> + 9 0 2 24 2. + <_> + + <_> + 11 9 5 12 -1. + <_> + 11 13 5 4 3. + <_> + + <_> + 7 15 9 6 -1. + <_> + 7 17 9 2 3. + <_> + + <_> + 5 7 18 6 -1. + <_> + 5 9 18 2 3. + <_> + + <_> + 8 9 5 12 -1. + <_> + 8 13 5 4 3. + <_> + + <_> + 4 17 17 6 -1. + <_> + 4 19 17 2 3. + <_> + + <_> + 0 3 18 14 -1. + <_> + 0 3 9 7 2. + <_> + 9 10 9 7 2. + <_> + + <_> + 0 1 24 2 -1. + <_> + 0 2 24 1 2. + <_> + + <_> + 0 15 18 3 -1. + <_> + 0 16 18 1 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 3 3 14 12 -1. + <_> + 3 9 14 6 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 10 6 6 10 -1. + <_> + 12 6 2 10 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 2 0 21 7 -1. + <_> + 9 0 7 7 3. + <_> + + <_> + 6 11 12 5 -1. + <_> + 10 11 4 5 3. + <_> + + <_> + 8 7 9 8 -1. + <_> + 11 7 3 8 3. + <_> + + <_> + 9 6 6 18 -1. + <_> + 9 6 3 9 2. + <_> + 12 15 3 9 2. + <_> + + <_> + 15 14 8 10 -1. + <_> + 19 14 4 5 2. + <_> + 15 19 4 5 2. + <_> + + <_> + 1 14 8 10 -1. + <_> + 1 14 4 5 2. + <_> + 5 19 4 5 2. + <_> + + <_> + 11 0 8 10 -1. + <_> + 15 0 4 5 2. + <_> + 11 5 4 5 2. + <_> + + <_> + 5 0 8 10 -1. + <_> + 5 0 4 5 2. + <_> + 9 5 4 5 2. + <_> + + <_> + 6 1 12 5 -1. + <_> + 6 1 6 5 2. + <_> + + <_> + 1 12 18 2 -1. + <_> + 10 12 9 2 2. + <_> + + <_> + 2 8 20 6 -1. + <_> + 12 8 10 3 2. + <_> + 2 11 10 3 2. + <_> + + <_> + 7 6 9 7 -1. + <_> + 10 6 3 7 3. + <_> + + <_> + 10 5 8 16 -1. + <_> + 14 5 4 8 2. + <_> + 10 13 4 8 2. + <_> + + <_> + 3 9 16 8 -1. + <_> + 3 9 8 4 2. + <_> + 11 13 8 4 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 7 12 10 8 -1. + <_> + 7 12 5 4 2. + <_> + 12 16 5 4 2. + <_> + + <_> + 9 19 15 4 -1. + <_> + 14 19 5 4 3. + <_> + + <_> + 1 0 18 9 -1. + <_> + 7 0 6 9 3. + <_> + + <_> + 13 4 10 8 -1. + <_> + 18 4 5 4 2. + <_> + 13 8 5 4 2. 
+ <_> + + <_> + 3 16 18 4 -1. + <_> + 9 16 6 4 3. + <_> + + <_> + 8 7 10 12 -1. + <_> + 13 7 5 6 2. + <_> + 8 13 5 6 2. + <_> + + <_> + 6 7 10 12 -1. + <_> + 6 7 5 6 2. + <_> + 11 13 5 6 2. + <_> + + <_> + 4 6 18 7 -1. + <_> + 10 6 6 7 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 3 17 18 3 -1. + <_> + 3 18 18 1 3. + <_> + + <_> + 2 4 6 10 -1. + <_> + 4 4 2 10 3. + <_> + + <_> + 16 0 8 24 -1. + <_> + 16 0 4 24 2. + <_> + + <_> + 4 0 8 15 -1. + <_> + 8 0 4 15 2. + <_> + + <_> + 16 0 8 24 -1. + <_> + 16 0 4 24 2. + <_> + + <_> + 1 4 18 9 -1. + <_> + 7 4 6 9 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 3 9 18 6 -1. + <_> + 3 9 9 3 2. + <_> + 12 12 9 3 2. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 0 5 6 9 -1. + <_> + 0 8 6 3 3. + <_> + + <_> + 4 7 18 4 -1. + <_> + 13 7 9 2 2. + <_> + 4 9 9 2 2. + <_> + + <_> + 2 1 12 20 -1. + <_> + 2 1 6 10 2. + <_> + 8 11 6 10 2. + <_> + + <_> + 17 0 6 23 -1. + <_> + 17 0 3 23 2. + <_> + + <_> + 1 6 2 18 -1. + <_> + 1 15 2 9 2. + <_> + + <_> + 8 8 10 6 -1. + <_> + 8 10 10 2 3. + <_> + + <_> + 0 6 20 6 -1. + <_> + 0 6 10 3 2. + <_> + 10 9 10 3 2. + <_> + + <_> + 11 12 12 5 -1. + <_> + 15 12 4 5 3. + <_> + + <_> + 0 4 3 19 -1. + <_> + 1 4 1 19 3. + <_> + + <_> + 19 1 3 18 -1. + <_> + 20 1 1 18 3. + <_> + + <_> + 2 1 3 18 -1. + <_> + 3 1 1 18 3. + <_> + + <_> + 3 10 18 3 -1. + <_> + 9 10 6 3 3. + <_> + + <_> + 4 4 10 9 -1. + <_> + 9 4 5 9 2. + <_> + + <_> + 7 13 14 7 -1. + <_> + 7 13 7 7 2. + <_> + + <_> + 3 13 14 7 -1. + <_> + 10 13 7 7 2. + <_> + + <_> + 8 15 9 6 -1. + <_> + 11 15 3 6 3. + <_> + + <_> + 4 14 8 10 -1. + <_> + 4 14 4 5 2. + <_> + 8 19 4 5 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 3 8 5 16 -1. + <_> + 3 16 5 8 2. + <_> + + <_> + 15 10 9 6 -1. + <_> + 15 12 9 2 3. + <_> + + <_> + 0 10 9 6 -1. + <_> + 0 12 9 2 3. + <_> + + <_> + 6 7 12 9 -1. + <_> + 6 10 12 3 3. + <_> + + <_> + 9 10 5 8 -1. + <_> + 9 14 5 4 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 16 6 7 6 -1. + <_> + 16 9 7 3 2. + <_> + + <_> + 8 1 4 22 -1. + <_> + 10 1 2 22 2. + <_> + + <_> + 6 6 14 3 -1. + <_> + 6 6 7 3 2. + <_> + + <_> + 0 18 19 3 -1. + <_> + 0 19 19 1 3. + <_> + + <_> + 17 0 6 24 -1. + <_> + 17 0 3 24 2. + <_> + + <_> + 0 13 15 6 -1. + <_> + 5 13 5 6 3. + <_> + + <_> + 9 6 10 14 -1. + <_> + 14 6 5 7 2. + <_> + 9 13 5 7 2. + <_> + + <_> + 1 6 8 10 -1. + <_> + 1 6 4 5 2. + <_> + 5 11 4 5 2. + <_> + + <_> + 7 6 12 5 -1. + <_> + 7 6 6 5 2. + <_> + + <_> + 7 7 9 6 -1. + <_> + 10 7 3 6 3. + <_> + + <_> + 7 8 14 14 -1. + <_> + 14 8 7 7 2. + <_> + 7 15 7 7 2. + <_> + + <_> + 3 8 14 14 -1. + <_> + 3 8 7 7 2. + <_> + 10 15 7 7 2. + <_> + + <_> + 9 8 13 4 -1. + <_> + 9 10 13 2 2. + <_> + + <_> + 3 2 6 12 -1. + <_> + 3 2 3 6 2. + <_> + 6 8 3 6 2. + <_> + + <_> + 6 10 17 6 -1. + <_> + 6 13 17 3 2. + <_> + + <_> + 1 10 17 6 -1. + <_> + 1 13 17 3 2. + <_> + + <_> + 16 7 8 9 -1. + <_> + 16 10 8 3 3. + <_> + + <_> + 0 7 8 9 -1. + <_> + 0 10 8 3 3. + <_> + + <_> + 0 9 24 10 -1. + <_> + 12 9 12 5 2. + <_> + 0 14 12 5 2. + <_> + + <_> + 3 2 15 8 -1. + <_> + 8 2 5 8 3. + <_> + + <_> + 4 2 18 8 -1. + <_> + 10 2 6 8 3. + <_> + + <_> + 0 1 18 4 -1. + <_> + 0 1 9 2 2. + <_> + 9 3 9 2 2. + <_> + + <_> + 20 2 3 18 -1. + <_> + 21 2 1 18 3. + <_> + + <_> + 1 3 3 19 -1. + <_> + 2 3 1 19 3. + <_> + + <_> + 18 8 6 16 -1. + <_> + 20 8 2 16 3. + <_> + + <_> + 0 8 6 16 -1. + <_> + 2 8 2 16 3. + <_> + + <_> + 8 18 11 6 -1. 
+ <_> + 8 20 11 2 3. + <_> + + <_> + 4 6 12 5 -1. + <_> + 8 6 4 5 3. + <_> + + <_> + 7 6 12 5 -1. + <_> + 11 6 4 5 3. + <_> + + <_> + 6 3 9 6 -1. + <_> + 9 3 3 6 3. + <_> + + <_> + 7 6 12 5 -1. + <_> + 7 6 6 5 2. + <_> + + <_> + 9 8 6 7 -1. + <_> + 12 8 3 7 2. + <_> + + <_> + 8 2 9 6 -1. + <_> + 11 2 3 6 3. + <_> + + <_> + 8 14 6 9 -1. + <_> + 8 17 6 3 3. + <_> + + <_> + 8 2 9 6 -1. + <_> + 11 2 3 6 3. + <_> + + <_> + 4 3 16 20 -1. + <_> + 4 3 8 10 2. + <_> + 12 13 8 10 2. + <_> + + <_> + 7 6 10 12 -1. + <_> + 12 6 5 6 2. + <_> + 7 12 5 6 2. + <_> + + <_> + 0 2 7 12 -1. + <_> + 0 6 7 4 3. + <_> + + <_> + 12 17 11 6 -1. + <_> + 12 19 11 2 3. + <_> + + <_> + 4 7 12 8 -1. + <_> + 4 7 6 4 2. + <_> + 10 11 6 4 2. + <_> + + <_> + 8 11 8 10 -1. + <_> + 12 11 4 5 2. + <_> + 8 16 4 5 2. + <_> + + <_> + 9 1 4 9 -1. + <_> + 11 1 2 9 2. + <_> + + <_> + 14 0 3 22 -1. + <_> + 15 0 1 22 3. + <_> + + <_> + 7 0 3 22 -1. + <_> + 8 0 1 22 3. + <_> + + <_> + 4 7 18 4 -1. + <_> + 13 7 9 2 2. + <_> + 4 9 9 2 2. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 0 0 18 13 -1. + <_> + 9 0 9 13 2. + <_> + + <_> + 16 0 3 24 -1. + <_> + 17 0 1 24 3. + <_> + + <_> + 5 0 3 24 -1. + <_> + 6 0 1 24 3. + <_> + + <_> + 10 15 5 8 -1. + <_> + 10 19 5 4 2. + <_> + + <_> + 2 18 18 2 -1. + <_> + 2 19 18 1 2. + <_> + + <_> + 2 8 20 3 -1. + <_> + 2 9 20 1 3. + <_> + + <_> + 7 6 9 6 -1. + <_> + 7 8 9 2 3. + <_> + + <_> + 3 2 19 10 -1. + <_> + 3 7 19 5 2. + <_> + + <_> + 2 7 19 3 -1. + <_> + 2 8 19 1 3. + <_> + + <_> + 15 6 9 4 -1. + <_> + 15 8 9 2 2. + <_> + + <_> + 2 2 18 8 -1. + <_> + 8 2 6 8 3. + <_> + + <_> + 10 9 14 4 -1. + <_> + 10 9 7 4 2. + <_> + + <_> + 4 4 6 16 -1. + <_> + 7 4 3 16 2. + <_> + + <_> + 15 8 9 16 -1. + <_> + 18 8 3 16 3. + <_> + + <_> + 0 8 9 16 -1. + <_> + 3 8 3 16 3. + <_> + + <_> + 18 0 6 14 -1. + <_> + 20 0 2 14 3. + <_> + + <_> + 0 0 6 14 -1. + <_> + 2 0 2 14 3. + <_> + + <_> + 15 0 6 22 -1. + <_> + 17 0 2 22 3. + <_> + + <_> + 3 0 6 22 -1. + <_> + 5 0 2 22 3. + <_> + + <_> + 12 2 12 20 -1. + <_> + 16 2 4 20 3. + <_> + + <_> + 0 2 12 20 -1. + <_> + 4 2 4 20 3. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 9 0 6 16 -1. + <_> + 12 0 3 16 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 3 4 18 6 -1. + <_> + 3 4 9 3 2. + <_> + 12 7 9 3 2. + <_> + + <_> + 5 5 16 8 -1. + <_> + 13 5 8 4 2. + <_> + 5 9 8 4 2. + <_> + + <_> + 0 13 10 6 -1. + <_> + 0 15 10 2 3. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 6 2 9 6 -1. + <_> + 9 2 3 6 3. + <_> + + <_> + 14 1 10 8 -1. + <_> + 19 1 5 4 2. + <_> + 14 5 5 4 2. + <_> + + <_> + 9 1 3 12 -1. + <_> + 9 7 3 6 2. + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 5 12 6 -1. + <_> + 10 5 4 6 3. + <_> + + <_> + 1 1 8 5 -1. + <_> + 5 1 4 5 2. + <_> + + <_> + 12 12 6 8 -1. + <_> + 12 16 6 4 2. + <_> + + <_> + 3 12 12 6 -1. + <_> + 3 14 12 2 3. + <_> + + <_> + 9 18 12 6 -1. + <_> + 15 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 4 13 6 6 -1. + <_> + 4 16 6 3 2. + <_> + + <_> + 11 3 7 18 -1. + <_> + 11 12 7 9 2. + <_> + + <_> + 3 9 18 3 -1. + <_> + 9 9 6 3 3. + <_> + + <_> + 5 3 19 2 -1. + <_> + 5 4 19 1 2. + <_> + + <_> + 4 2 12 6 -1. + <_> + 4 2 6 3 2. + <_> + 10 5 6 3 2. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 16 9 5 15 -1. + <_> + 16 14 5 5 3. + <_> + + <_> + 3 9 5 15 -1. + <_> + 3 14 5 5 3. + <_> + + <_> + 6 6 14 6 -1. + <_> + 13 6 7 3 2. + <_> + 6 9 7 3 2. 
+ <_> + + <_> + 8 6 3 14 -1. + <_> + 8 13 3 7 2. + <_> + + <_> + 0 16 24 5 -1. + <_> + 8 16 8 5 3. + <_> + + <_> + 0 20 20 3 -1. + <_> + 10 20 10 3 2. + <_> + + <_> + 5 10 18 2 -1. + <_> + 5 11 18 1 2. + <_> + + <_> + 0 6 6 10 -1. + <_> + 2 6 2 10 3. + <_> + + <_> + 2 1 20 3 -1. + <_> + 2 2 20 1 3. + <_> + + <_> + 9 13 6 11 -1. + <_> + 11 13 2 11 3. + <_> + + <_> + 9 15 6 8 -1. + <_> + 9 19 6 4 2. + <_> + + <_> + 9 12 6 9 -1. + <_> + 9 15 6 3 3. + <_> + + <_> + 5 11 18 2 -1. + <_> + 5 12 18 1 2. + <_> + + <_> + 2 6 15 6 -1. + <_> + 2 8 15 2 3. + <_> + + <_> + 6 0 18 3 -1. + <_> + 6 1 18 1 3. + <_> + + <_> + 5 0 3 18 -1. + <_> + 6 0 1 18 3. + <_> + + <_> + 18 3 6 10 -1. + <_> + 20 3 2 10 3. + <_> + + <_> + 0 3 6 10 -1. + <_> + 2 3 2 10 3. + <_> + + <_> + 10 5 8 9 -1. + <_> + 10 5 4 9 2. + <_> + + <_> + 6 5 8 9 -1. + <_> + 10 5 4 9 2. + <_> + + <_> + 3 2 20 3 -1. + <_> + 3 3 20 1 3. + <_> + + <_> + 5 2 13 4 -1. + <_> + 5 4 13 2 2. + <_> + + <_> + 17 0 7 14 -1. + <_> + 17 7 7 7 2. + <_> + + <_> + 0 0 7 14 -1. + <_> + 0 7 7 7 2. + <_> + + <_> + 9 11 10 6 -1. + <_> + 9 11 5 6 2. + <_> + + <_> + 5 11 10 6 -1. + <_> + 10 11 5 6 2. + <_> + + <_> + 11 6 3 18 -1. + <_> + 11 12 3 6 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 4 6 9 10 -1. + <_> + 4 11 9 5 2. + <_> + + <_> + 9 7 15 4 -1. + <_> + 9 9 15 2 2. + <_> + + <_> + 5 6 12 6 -1. + <_> + 5 6 6 3 2. + <_> + 11 9 6 3 2. + <_> + + <_> + 6 1 12 9 -1. + <_> + 6 4 12 3 3. + <_> + + <_> + 7 9 6 12 -1. + <_> + 7 9 3 6 2. + <_> + 10 15 3 6 2. + <_> + + <_> + 11 5 13 6 -1. + <_> + 11 7 13 2 3. + <_> + + <_> + 1 11 22 13 -1. + <_> + 12 11 11 13 2. + <_> + + <_> + 18 8 6 6 -1. + <_> + 18 11 6 3 2. + <_> + + <_> + 0 8 6 6 -1. + <_> + 0 11 6 3 2. + <_> + + <_> + 0 6 24 3 -1. + <_> + 0 7 24 1 3. + <_> + + <_> + 0 5 10 6 -1. + <_> + 0 7 10 2 3. + <_> + + <_> + 6 7 18 3 -1. + <_> + 6 8 18 1 3. + <_> + + <_> + 0 0 10 6 -1. + <_> + 0 2 10 2 3. + <_> + + <_> + 19 0 3 19 -1. + <_> + 20 0 1 19 3. + <_> + + <_> + 4 6 12 16 -1. + <_> + 4 6 6 8 2. + <_> + 10 14 6 8 2. + <_> + + <_> + 19 6 4 18 -1. + <_> + 21 6 2 9 2. + <_> + 19 15 2 9 2. + <_> + + <_> + 1 6 4 18 -1. + <_> + 1 6 2 9 2. + <_> + 3 15 2 9 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 3 22 18 1 3. + <_> + + <_> + 0 19 9 4 -1. + <_> + 0 21 9 2 2. + <_> + + <_> + 12 18 12 6 -1. + <_> + 18 18 6 3 2. + <_> + 12 21 6 3 2. + <_> + + <_> + 7 18 9 4 -1. + <_> + 7 20 9 2 2. + <_> + + <_> + 12 16 10 8 -1. + <_> + 17 16 5 4 2. + <_> + 12 20 5 4 2. + <_> + + <_> + 2 16 10 8 -1. + <_> + 2 16 5 4 2. + <_> + 7 20 5 4 2. + <_> + + <_> + 14 0 10 12 -1. + <_> + 19 0 5 6 2. + <_> + 14 6 5 6 2. + <_> + + <_> + 0 0 10 12 -1. + <_> + 0 0 5 6 2. + <_> + 5 6 5 6 2. + <_> + + <_> + 15 14 9 6 -1. + <_> + 15 16 9 2 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 14 14 10 6 -1. + <_> + 14 16 10 2 3. + <_> + + <_> + 0 14 10 6 -1. + <_> + 0 16 10 2 3. + <_> + + <_> + 5 18 18 2 -1. + <_> + 5 19 18 1 2. + <_> + + <_> + 0 18 18 3 -1. + <_> + 0 19 18 1 3. + <_> + + <_> + 3 5 18 12 -1. + <_> + 12 5 9 6 2. + <_> + 3 11 9 6 2. + <_> + + <_> + 5 3 7 9 -1. + <_> + 5 6 7 3 3. + <_> + + <_> + 4 0 19 15 -1. + <_> + 4 5 19 5 3. + <_> + + <_> + 3 0 16 4 -1. + <_> + 3 2 16 2 2. + <_> + + <_> + 4 12 16 12 -1. + <_> + 4 12 8 12 2. + <_> + + <_> + 4 3 12 15 -1. + <_> + 10 3 6 15 2. + <_> + + <_> + 16 4 2 19 -1. + <_> + 16 4 1 19 2. + <_> + + <_> + 6 4 2 19 -1. + <_> + 7 4 1 19 2. + <_> + + <_> + 13 14 8 10 -1. + <_> + 17 14 4 5 2. + <_> + 13 19 4 5 2. 
+ <_> + + <_> + 3 14 8 10 -1. + <_> + 3 14 4 5 2. + <_> + 7 19 4 5 2. + <_> + + <_> + 12 6 3 18 -1. + <_> + 12 12 3 6 3. + <_> + + <_> + 5 11 12 6 -1. + <_> + 5 11 6 3 2. + <_> + 11 14 6 3 2. + <_> + + <_> + 10 5 8 10 -1. + <_> + 14 5 4 5 2. + <_> + 10 10 4 5 2. + <_> + + <_> + 6 4 12 10 -1. + <_> + 6 4 6 5 2. + <_> + 12 9 6 5 2. + <_> + + <_> + 6 8 18 10 -1. + <_> + 15 8 9 5 2. + <_> + 6 13 9 5 2. + <_> + + <_> + 0 8 18 10 -1. + <_> + 0 8 9 5 2. + <_> + 9 13 9 5 2. + <_> + + <_> + 12 6 3 18 -1. + <_> + 12 12 3 6 3. + <_> + + <_> + 0 14 18 3 -1. + <_> + 0 15 18 1 3. + <_> + + <_> + 12 6 3 18 -1. + <_> + 12 12 3 6 3. + <_> + + <_> + 9 6 3 18 -1. + <_> + 9 12 3 6 3. + <_> + + <_> + 6 14 18 3 -1. + <_> + 6 15 18 1 3. + <_> + + <_> + 0 5 18 3 -1. + <_> + 0 6 18 1 3. + <_> + + <_> + 2 5 22 3 -1. + <_> + 2 6 22 1 3. + <_> + + <_> + 0 0 21 10 -1. + <_> + 7 0 7 10 3. + <_> + + <_> + 6 3 18 17 -1. + <_> + 12 3 6 17 3. + <_> + + <_> + 0 3 18 17 -1. + <_> + 6 3 6 17 3. + <_> + + <_> + 0 12 24 11 -1. + <_> + 8 12 8 11 3. + <_> + + <_> + 4 10 16 6 -1. + <_> + 4 13 16 3 2. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 6 14 8 7 -1. + <_> + 10 14 4 7 2. + <_> + + <_> + 15 10 6 14 -1. + <_> + 18 10 3 7 2. + <_> + 15 17 3 7 2. + <_> + + <_> + 3 10 6 14 -1. + <_> + 3 10 3 7 2. + <_> + 6 17 3 7 2. + <_> + + <_> + 6 12 18 2 -1. + <_> + 6 13 18 1 2. + <_> + + <_> + 5 8 10 6 -1. + <_> + 5 10 10 2 3. + <_> + + <_> + 12 11 9 4 -1. + <_> + 12 13 9 2 2. + <_> + + <_> + 0 11 9 6 -1. + <_> + 0 13 9 2 3. + <_> + + <_> + 11 2 3 18 -1. + <_> + 12 2 1 18 3. + <_> + + <_> + 10 2 3 18 -1. + <_> + 11 2 1 18 3. + <_> + + <_> + 9 12 6 10 -1. + <_> + 11 12 2 10 3. + <_> + + <_> + 1 10 6 9 -1. + <_> + 1 13 6 3 3. + <_> + + <_> + 6 9 16 6 -1. + <_> + 14 9 8 3 2. + <_> + 6 12 8 3 2. + <_> + + <_> + 1 8 9 6 -1. + <_> + 1 10 9 2 3. + <_> + + <_> + 7 7 16 6 -1. + <_> + 7 9 16 2 3. + <_> + + <_> + 0 0 18 3 -1. + <_> + 0 1 18 1 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 9 5 6 6 -1. + <_> + 12 5 3 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 9 1 6 9 -1. + <_> + 9 4 6 3 3. + <_> + + <_> + 1 0 18 9 -1. + <_> + 1 3 18 3 3. + <_> + + <_> + 0 3 24 3 -1. + <_> + 0 4 24 1 3. + <_> + + <_> + 6 14 9 4 -1. + <_> + 6 16 9 2 2. + <_> + + <_> + 8 9 8 10 -1. + <_> + 12 9 4 5 2. + <_> + 8 14 4 5 2. + <_> + + <_> + 5 2 13 9 -1. + <_> + 5 5 13 3 3. + <_> + + <_> + 4 4 16 9 -1. + <_> + 4 7 16 3 3. + <_> + + <_> + 4 4 14 9 -1. + <_> + 4 7 14 3 3. + <_> + + <_> + 8 5 9 6 -1. + <_> + 8 7 9 2 3. + <_> + + <_> + 1 7 16 6 -1. + <_> + 1 9 16 2 3. + <_> + + <_> + 10 5 13 9 -1. + <_> + 10 8 13 3 3. + <_> + + <_> + 1 5 13 9 -1. + <_> + 1 8 13 3 3. + <_> + + <_> + 0 4 24 6 -1. + <_> + 12 4 12 3 2. + <_> + 0 7 12 3 2. + <_> + + <_> + 1 14 10 9 -1. + <_> + 1 17 10 3 3. + <_> + + <_> + 5 17 18 3 -1. + <_> + 5 18 18 1 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 9 17 9 6 -1. + <_> + 9 19 9 2 3. + <_> + + <_> + 1 20 22 4 -1. + <_> + 1 20 11 2 2. + <_> + 12 22 11 2 2. + <_> + + <_> + 8 14 8 6 -1. + <_> + 8 17 8 3 2. + <_> + + <_> + 8 6 8 15 -1. + <_> + 8 11 8 5 3. + <_> + + <_> + 5 4 18 3 -1. + <_> + 5 5 18 1 3. + <_> + + <_> + 9 3 5 10 -1. + <_> + 9 8 5 5 2. + <_> + + <_> + 6 8 12 3 -1. + <_> + 6 8 6 3 2. + <_> + + <_> + 2 6 18 6 -1. + <_> + 2 6 9 3 2. + <_> + 11 9 9 3 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 7 5 6 6 -1. + <_> + 10 5 3 6 2. 
+ <_> + + <_> + 14 5 2 18 -1. + <_> + 14 14 2 9 2. + <_> + + <_> + 8 5 2 18 -1. + <_> + 8 14 2 9 2. + <_> + + <_> + 9 2 10 6 -1. + <_> + 9 2 5 6 2. + <_> + + <_> + 3 1 18 12 -1. + <_> + 12 1 9 12 2. + <_> + + <_> + 5 2 17 22 -1. + <_> + 5 13 17 11 2. + <_> + + <_> + 4 0 12 6 -1. + <_> + 4 2 12 2 3. + <_> + + <_> + 6 9 16 6 -1. + <_> + 14 9 8 3 2. + <_> + 6 12 8 3 2. + <_> + + <_> + 9 0 5 18 -1. + <_> + 9 9 5 9 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 9 1 6 12 -1. + <_> + 11 1 2 12 3. + <_> + + <_> + 5 9 13 4 -1. + <_> + 5 11 13 2 2. + <_> + + <_> + 5 8 19 3 -1. + <_> + 5 9 19 1 3. + <_> + + <_> + 9 9 6 8 -1. + <_> + 9 13 6 4 2. + <_> + + <_> + 11 9 4 15 -1. + <_> + 11 14 4 5 3. + <_> + + <_> + 2 0 6 14 -1. + <_> + 2 0 3 7 2. + <_> + 5 7 3 7 2. + <_> + + <_> + 15 1 6 14 -1. + <_> + 18 1 3 7 2. + <_> + 15 8 3 7 2. + <_> + + <_> + 3 1 6 14 -1. + <_> + 3 1 3 7 2. + <_> + 6 8 3 7 2. + <_> + + <_> + 3 20 18 4 -1. + <_> + 12 20 9 2 2. + <_> + 3 22 9 2 2. + <_> + + <_> + 5 0 4 20 -1. + <_> + 5 0 2 10 2. + <_> + 7 10 2 10 2. + <_> + + <_> + 16 8 8 12 -1. + <_> + 20 8 4 6 2. + <_> + 16 14 4 6 2. + <_> + + <_> + 0 8 8 12 -1. + <_> + 0 8 4 6 2. + <_> + 4 14 4 6 2. + <_> + + <_> + 13 13 10 8 -1. + <_> + 18 13 5 4 2. + <_> + 13 17 5 4 2. + <_> + + <_> + 1 13 10 8 -1. + <_> + 1 13 5 4 2. + <_> + 6 17 5 4 2. + <_> + + <_> + 15 8 4 15 -1. + <_> + 15 13 4 5 3. + <_> + + <_> + 5 8 4 15 -1. + <_> + 5 13 4 5 3. + <_> + + <_> + 6 11 16 12 -1. + <_> + 6 15 16 4 3. + <_> + + <_> + 2 11 16 12 -1. + <_> + 2 15 16 4 3. + <_> + + <_> + 14 12 7 9 -1. + <_> + 14 15 7 3 3. + <_> + + <_> + 10 1 3 21 -1. + <_> + 10 8 3 7 3. + <_> + + <_> + 13 11 9 4 -1. + <_> + 13 13 9 2 2. + <_> + + <_> + 3 10 17 9 -1. + <_> + 3 13 17 3 3. + <_> + + <_> + 13 8 8 15 -1. + <_> + 13 13 8 5 3. + <_> + + <_> + 3 8 8 15 -1. + <_> + 3 13 8 5 3. + <_> + + <_> + 11 14 10 8 -1. + <_> + 16 14 5 4 2. + <_> + 11 18 5 4 2. + <_> + + <_> + 0 18 22 6 -1. + <_> + 0 18 11 3 2. + <_> + 11 21 11 3 2. + <_> + + <_> + 0 16 24 4 -1. + <_> + 0 16 12 4 2. + <_> + + <_> + 6 20 12 3 -1. + <_> + 12 20 6 3 2. + <_> + + <_> + 18 12 6 12 -1. + <_> + 21 12 3 6 2. + <_> + 18 18 3 6 2. + <_> + + <_> + 0 12 6 12 -1. + <_> + 0 12 3 6 2. + <_> + 3 18 3 6 2. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 1 6 22 10 -1. + <_> + 1 6 11 5 2. + <_> + 12 11 11 5 2. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 18 18 2 -1. + <_> + 0 19 18 1 2. + <_> + + <_> + 3 15 19 3 -1. + <_> + 3 16 19 1 3. + <_> + + <_> + 0 13 18 3 -1. + <_> + 0 14 18 1 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 17 9 6 -1. + <_> + 0 19 9 2 3. + <_> + + <_> + 12 17 9 6 -1. + <_> + 12 19 9 2 3. + <_> + + <_> + 3 17 9 6 -1. + <_> + 3 19 9 2 3. + <_> + + <_> + 16 2 3 20 -1. + <_> + 17 2 1 20 3. + <_> + + <_> + 0 13 24 8 -1. + <_> + 0 17 24 4 2. + <_> + + <_> + 9 1 6 22 -1. + <_> + 12 1 3 11 2. + <_> + 9 12 3 11 2. 
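The block above is raw OpenCV Haar-cascade feature data (each <_> entry describes one rectangle of a stage feature), presumably from the bundled haarcascade_frontalface_default.xml that u2net_portrait_demo.py loads further down in this patch. A minimal sketch of how such a cascade file is consumed; the image path here is illustrative only and not part of the repo:

import cv2

# Load the cascade shipped under saved_models (path assumed from u2net_portrait_demo.py).
face_cascade = cv2.CascadeClassifier(
    './saved_models/face_detection_cv2/haarcascade_frontalface_default.xml')

img = cv2.imread('person.jpg')                      # hypothetical input image
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

# scaleFactor=1.1 and minNeighbors=4 mirror detect_single_face() later in this patch.
faces = face_cascade.detectMultiScale(gray, 1.1, 4)
for (x, y, w, h) in faces:
    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
cv2.imwrite('person_faces.jpg', img)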
+ diff --git a/setup_model_weights.py b/setup_model_weights.py new file mode 100644 index 0000000000000000000000000000000000000000..a101749c1356d9e60888627de726f56f07259ef9 --- /dev/null +++ b/setup_model_weights.py @@ -0,0 +1,13 @@ +import os +import gdown + +os.makedirs('./saved_models/u2net', exist_ok=True) +os.makedirs('./saved_models/u2net_portrait', exist_ok=True) + +gdown.download('https://drive.google.com/uc?id=1ao1ovG1Qtx4b7EoskHXmi2E9rp5CHLcZ', + './saved_models/u2net/u2net.pth', + quiet=False) + +gdown.download('https://drive.google.com/uc?id=1IG3HdpcRiDoWNookbncQjeaPN28t90yW', + './saved_models/u2net_portrait/u2net_portrait.pth', + quiet=False) diff --git a/test.py b/test.py new file mode 100644 index 0000000000000000000000000000000000000000..b745cff90afb37bd25557ecf50f5fd9984c8881f --- /dev/null +++ b/test.py @@ -0,0 +1,126 @@ +import time +from collections import OrderedDict +from options.test_options import TestOptions +from data.data_loader import CreateDataLoader +from models.models import create_model +import util.util as util +import os +import numpy as np +import torch +from torch.autograd import Variable +#from tensorboardX import SummaryWriter +import cv2 +#writer = SummaryWriter('runs/G1G2') +SIZE = 320 +NC = 14 + + +def generate_label_plain(inputs): + size = inputs.size() + pred_batch = [] + for input in inputs: + input = input.view(1, NC, 256, 192) + pred = np.squeeze(input.data.max(1)[1].cpu().numpy(), axis=0) + pred_batch.append(pred) + + pred_batch = np.array(pred_batch) + pred_batch = torch.from_numpy(pred_batch) + label_batch = pred_batch.view(size[0], 1, 256, 192) + + return label_batch + + +def generate_label_color(inputs): + label_batch = [] + for i in range(len(inputs)): + label_batch.append(util.tensor2label(inputs[i], NC)) + label_batch = np.array(label_batch) + label_batch = label_batch * 2 - 1 + input_label = torch.from_numpy(label_batch) + + return input_label + + +def complete_compose(img, mask, label): + label = label.cpu().numpy() + M_f = label > 0 + M_f = M_f.astype(np.int) + M_f = torch.FloatTensor(M_f).cuda() + masked_img = img*(1-mask) + M_c = (1-mask.cuda())*M_f + M_c = M_c+torch.zeros(img.shape).cuda() # broadcasting + return masked_img, M_c, M_f + + +def compose(label, mask, color_mask, edge, color, noise): + masked_label = label*(1-mask) + masked_edge = mask*edge + masked_color_strokes = mask*(1-color_mask)*color + masked_noise = mask*noise + return masked_label, masked_edge, masked_color_strokes, masked_noise + + +def changearm(old_label): + label = old_label + arm1 = torch.FloatTensor((old_label.cpu().numpy() == 11).astype(np.int)) + arm2 = torch.FloatTensor((old_label.cpu().numpy() == 13).astype(np.int)) + noise = torch.FloatTensor((old_label.cpu().numpy() == 7).astype(np.int)) + label = label*(1-arm1)+arm1*4 + label = label*(1-arm2)+arm2*4 + label = label*(1-noise)+noise*4 + return label + + +def main(): + os.makedirs('sample', exist_ok=True) + opt = TestOptions().parse() + + data_loader = CreateDataLoader(opt) + dataset = data_loader.load_data() + dataset_size = len(data_loader) + print('# Inference images = %d' % dataset_size) + + model = create_model(opt) + + for i, data in enumerate(dataset): + + # add gaussian noise channel + # wash the label + t_mask = torch.FloatTensor( + (data['label'].cpu().numpy() == 7).astype(np.float)) + # + # data['label'] = data['label'] * (1 - t_mask) + t_mask * 4 + mask_clothes = torch.FloatTensor( + (data['label'].cpu().numpy() == 4).astype(np.int)) + mask_fore = torch.FloatTensor( + 
(data['label'].cpu().numpy() > 0).astype(np.int)) + img_fore = data['image'] * mask_fore + img_fore_wc = img_fore * mask_fore + all_clothes_label = changearm(data['label']) + + ############## Forward Pass ###################### + fake_image, warped_cloth, refined_cloth = model(Variable(data['label'].cuda()), Variable(data['edge'].cuda()), Variable(img_fore.cuda()), Variable( + mask_clothes.cuda()), Variable(data['color'].cuda()), Variable(all_clothes_label.cuda()), Variable(data['image'].cuda()), Variable(data['pose'].cuda()), Variable(data['image'].cuda()), Variable(mask_fore.cuda())) + + # make output folders + output_dir = os.path.join(opt.results_dir, opt.phase) + fake_image_dir = os.path.join(output_dir, 'try-on') + os.makedirs(fake_image_dir, exist_ok=True) + warped_cloth_dir = os.path.join(output_dir, 'warped_cloth') + os.makedirs(warped_cloth_dir, exist_ok=True) + refined_cloth_dir = os.path.join(output_dir, 'refined_cloth') + os.makedirs(refined_cloth_dir, exist_ok=True) + + # save output + for j in range(opt.batchSize): + print("Saving", data['name'][j]) + util.save_tensor_as_image(fake_image[j], + os.path.join(fake_image_dir, data['name'][j])) + util.save_tensor_as_image(warped_cloth[j], + os.path.join(warped_cloth_dir, data['name'][j])) + util.save_tensor_as_image(refined_cloth[j], + os.path.join(refined_cloth_dir, data['name'][j])) + + +if __name__ == '__main__': + main() diff --git a/tps_grid_gen.py b/tps_grid_gen.py new file mode 100644 index 0000000000000000000000000000000000000000..3d96b20e8482de603696231ac172155e2d6b442a --- /dev/null +++ b/tps_grid_gen.py @@ -0,0 +1,83 @@ +# encoding: utf-8 + +import torch +import itertools +import torch.nn as nn +from torch.autograd import Function, Variable + +# phi(x1, x2) = r^2 * log(r), where r = ||x1 - x2||_2 + + +def compute_partial_repr(input_points, control_points): + N = input_points.size(0) + M = control_points.size(0) + pairwise_diff = input_points.view(N, 1, 2) - control_points.view(1, M, 2) + # original implementation, very slow + # pairwise_dist = torch.sum(pairwise_diff ** 2, dim = 2) # square of distance + pairwise_diff_square = pairwise_diff * pairwise_diff + pairwise_dist = pairwise_diff_square[:, + :, 0] + pairwise_diff_square[:, :, 1] + repr_matrix = 0.5 * pairwise_dist * torch.log(pairwise_dist) + # fix numerical error for 0 * log(0), substitute all nan with 0 + mask = repr_matrix != repr_matrix + repr_matrix.masked_fill_(mask, 0) + return repr_matrix + + +class TPSGridGen(nn.Module): + + def __init__(self, target_height, target_width, target_control_points): + super(TPSGridGen, self).__init__() + assert target_control_points.ndimension() == 2 + assert target_control_points.size(1) == 2 + N = target_control_points.size(0) + self.num_points = N + target_control_points = target_control_points.float() + + # create padded kernel matrix + forward_kernel = torch.zeros(N + 3, N + 3) + target_control_partial_repr = compute_partial_repr( + target_control_points, target_control_points) + forward_kernel[:N, :N].copy_(target_control_partial_repr) + forward_kernel[:N, -3].fill_(1) + forward_kernel[-3, :N].fill_(1) + forward_kernel[:N, -2:].copy_(target_control_points) + forward_kernel[-2:, :N].copy_(target_control_points.transpose(0, 1)) + # compute inverse matrix + inverse_kernel = torch.inverse(forward_kernel) + + # create target cordinate matrix + HW = target_height * target_width + target_coordinate = list(itertools.product( + range(target_height), range(target_width))) + # print(target_coordinate) + target_coordinate = 
torch.Tensor(target_coordinate) # HW x 2 + Y, X = target_coordinate.split(1, dim=1) + Y = Y * 2 / (target_height - 1) - 1 + X = X * 2 / (target_width - 1) - 1 + # convert from (y, x) to (x, y) + target_coordinate = torch.cat([X, Y], dim=1) + target_coordinate_partial_repr = compute_partial_repr( + target_coordinate, target_control_points) + target_coordinate_repr = torch.cat([ + target_coordinate_partial_repr, torch.ones( + HW, 1), target_coordinate + ], dim=1) + + # register precomputed matrices + self.register_buffer('inverse_kernel', inverse_kernel) + self.register_buffer('padding_matrix', torch.zeros(3, 2)) + self.register_buffer('target_coordinate_repr', target_coordinate_repr) + + def forward(self, source_control_points): + assert source_control_points.ndimension() == 3 + assert source_control_points.size(1) == self.num_points + assert source_control_points.size(2) == 2 + batch_size = source_control_points.size(0) + + Y = torch.cat([source_control_points, Variable( + self.padding_matrix.expand(batch_size, 3, 2))], 1) + mapping_matrix = torch.matmul(Variable(self.inverse_kernel), Y) + source_coordinate = torch.matmul( + Variable(self.target_coordinate_repr), mapping_matrix) + return source_coordinate diff --git a/u2net_human_seg_test.py b/u2net_human_seg_test.py new file mode 100644 index 0000000000000000000000000000000000000000..ade5fc040e6ea18663659e1818723c436ca48d5e --- /dev/null +++ b/u2net_human_seg_test.py @@ -0,0 +1,117 @@ +import os +from skimage import io, transform +import torch +import torchvision +from torch.autograd import Variable +import torch.nn as nn +import torch.nn.functional as F +from torch.utils.data import Dataset, DataLoader +from torchvision import transforms#, utils +# import torch.optim as optim + +import numpy as np +from PIL import Image +import glob + +from data_loader import RescaleT +from data_loader import ToTensor +from data_loader import ToTensorLab +from data_loader import SalObjDataset + +from model import U2NET # full size version 173.6 MB + +# normalize the predicted SOD probability map +def normPRED(d): + ma = torch.max(d) + mi = torch.min(d) + + dn = (d-mi)/(ma-mi) + + return dn + +def save_output(image_name,pred,d_dir): + + predict = pred + predict = predict.squeeze() + predict_np = predict.cpu().data.numpy() + + im = Image.fromarray(predict_np*255).convert('RGB') + img_name = image_name.split(os.sep)[-1] + image = io.imread(image_name) + imo = im.resize((image.shape[1],image.shape[0]),resample=Image.BILINEAR) + + pb_np = np.array(imo) + + aaa = img_name.split(".") + bbb = aaa[0:-1] + imidx = bbb[0] + for i in range(1,len(bbb)): + imidx = imidx + "." + bbb[i] + + imo.save(d_dir+imidx+'.png') + +def main(): + + # --------- 1. get image path and name --------- + model_name='u2net' + + + image_dir = os.path.join(os.getcwd(), 'test_data', 'test_human_images') + prediction_dir = os.path.join(os.getcwd(), 'test_data', 'test_human_images' + '_results' + os.sep) + model_dir = os.path.join(os.getcwd(), 'saved_models', model_name+'_human_seg', model_name + '_human_seg.pth') + + img_name_list = glob.glob(image_dir + os.sep + '*') + print(img_name_list) + + # --------- 2. dataloader --------- + #1. dataloader + test_salobj_dataset = SalObjDataset(img_name_list = img_name_list, + lbl_name_list = [], + transform=transforms.Compose([RescaleT(320), + ToTensorLab(flag=0)]) + ) + test_salobj_dataloader = DataLoader(test_salobj_dataset, + batch_size=1, + shuffle=False, + num_workers=1) + + # --------- 3. 
model define --------- + if(model_name=='u2net'): + print("...load U2NET---173.6 MB") + net = U2NET(3,1) + + if torch.cuda.is_available(): + net.load_state_dict(torch.load(model_dir)) + net.cuda() + else: + net.load_state_dict(torch.load(model_dir, map_location='cpu')) + net.eval() + + # --------- 4. inference for each image --------- + for i_test, data_test in enumerate(test_salobj_dataloader): + + print("inferencing:",img_name_list[i_test].split(os.sep)[-1]) + + inputs_test = data_test['image'] + inputs_test = inputs_test.type(torch.FloatTensor) + + if torch.cuda.is_available(): + inputs_test = Variable(inputs_test.cuda()) + else: + inputs_test = Variable(inputs_test) + + d1,d2,d3,d4,d5,d6,d7= net(inputs_test) + + # normalization + pred = d1[:,0,:,:] + pred = normPRED(pred) + + # save results to test_results folder + if not os.path.exists(prediction_dir): + os.makedirs(prediction_dir, exist_ok=True) + save_output(img_name_list[i_test],pred,prediction_dir) + + del d1,d2,d3,d4,d5,d6,d7 + +if __name__ == "__main__": + main() diff --git a/u2net_load.py b/u2net_load.py new file mode 100644 index 0000000000000000000000000000000000000000..d57beaabd36e0762ce2aa730a657d871e0cda8c4 --- /dev/null +++ b/u2net_load.py @@ -0,0 +1,24 @@ +import os +import torch +from model import U2NET # full size version 173.6 MB +from model import U2NETP # small version u2net 4.7 MB + + +def model(model_name='u2net'): + + + model_dir = os.path.join(os.getcwd(), 'saved_models', model_name, model_name + '.pth') + + if(model_name=='u2net'): + print("...load U2NET---173.6 MB") + net = U2NET(3,1) + elif(model_name=='u2netp'): + print("...load U2NEP---4.7 MB") + net = U2NETP(3,1) + net.load_state_dict(torch.load(model_dir)) + + if torch.cuda.is_available(): + net.cuda() + net.eval() + + return net diff --git a/u2net_portrait_demo.py b/u2net_portrait_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..516272a61d6533b8ebf8e466dfa3bda2d9c4e9a3 --- /dev/null +++ b/u2net_portrait_demo.py @@ -0,0 +1,175 @@ +import cv2 +import torch +from model import U2NET +from torch.autograd import Variable +import numpy as np +from glob import glob +import os + +def detect_single_face(face_cascade,img): + # Convert into grayscale + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + + # Detect faces + faces = face_cascade.detectMultiScale(gray, 1.1, 4) + if(len(faces)==0): + print("Warming: no face detection, the portrait u2net will run on the whole image!") + return None + + # filter to keep the largest face + wh = 0 + idx = 0 + for i in range(0,len(faces)): + (x,y,w,h) = faces[i] + if(whwidth): + r = right-width + right = width + + tpad = int(float(h)*0.6) + top = y - tpad + if(top<0): + t = tpad-y + top = 0 + + bpad = int(float(h)*0.2) + bottom = y+h+bpad + if(bottom>height): + b = bottom-height + bottom = height + + + im_face = img[top:bottom,left:right] + if(len(im_face.shape)==2): + im_face = np.repeat(im_face[:,:,np.newaxis],(1,1,3)) + + im_face = np.pad(im_face,((t,b),(l,r),(0,0)),mode='constant',constant_values=((255,255),(255,255),(255,255))) + + # pad to achieve image with square shape for avoding face deformation after resizing + hf,wf = im_face.shape[0:2] + if(hf-2>wf): + wfp = int((hf-wf)/2) + im_face = np.pad(im_face,((0,0),(wfp,wfp),(0,0)),mode='constant',constant_values=((255,255),(255,255),(255,255))) + elif(wf-2>hf): + hfp = int((wf-hf)/2) + im_face = np.pad(im_face,((hfp,hfp),(0,0),(0,0)),mode='constant',constant_values=((255,255),(255,255),(255,255))) + + # resize to have 512x512 resolution + im_face = 
cv2.resize(im_face, (512,512), interpolation = cv2.INTER_AREA) + + return im_face + +def normPRED(d): + ma = torch.max(d) + mi = torch.min(d) + + dn = (d-mi)/(ma-mi) + + return dn + +def inference(net,input): + + # normalize the input + tmpImg = np.zeros((input.shape[0],input.shape[1],3)) + input = input/np.max(input) + + tmpImg[:,:,0] = (input[:,:,2]-0.406)/0.225 + tmpImg[:,:,1] = (input[:,:,1]-0.456)/0.224 + tmpImg[:,:,2] = (input[:,:,0]-0.485)/0.229 + + # convert BGR to RGB + tmpImg = tmpImg.transpose((2, 0, 1)) + tmpImg = tmpImg[np.newaxis,:,:,:] + tmpImg = torch.from_numpy(tmpImg) + + # convert numpy array to torch tensor + tmpImg = tmpImg.type(torch.FloatTensor) + + if torch.cuda.is_available(): + tmpImg = Variable(tmpImg.cuda()) + else: + tmpImg = Variable(tmpImg) + + # inference + d1,d2,d3,d4,d5,d6,d7= net(tmpImg) + + # normalization + pred = 1.0 - d1[:,0,:,:] + pred = normPRED(pred) + + # convert torch tensor to numpy array + pred = pred.squeeze() + pred = pred.cpu().data.numpy() + + del d1,d2,d3,d4,d5,d6,d7 + + return pred + +def main(): + + # get the image path list for inference + im_list = glob('./test_data/test_portrait_images/your_portrait_im/*') + print("Number of images: ",len(im_list)) + # indicate the output directory + out_dir = './test_data/test_portrait_images/your_portrait_results' + if(not os.path.exists(out_dir)): + os.mkdir(out_dir) + + # Load the cascade face detection model + face_cascade = cv2.CascadeClassifier('./saved_models/face_detection_cv2/haarcascade_frontalface_default.xml') + # u2net_portrait path + model_dir = './saved_models/u2net_portrait/u2net_portrait.pth' + + # load u2net_portrait model + net = U2NET(3,1) + net.load_state_dict(torch.load(model_dir)) + if torch.cuda.is_available(): + net.cuda() + net.eval() + + # do the inference one-by-one + for i in range(0,len(im_list)): + print("--------------------------") + print("inferencing ", i, "/", len(im_list), im_list[i]) + + # load each image + img = cv2.imread(im_list[i]) + height,width = img.shape[0:2] + face = detect_single_face(face_cascade,img) + im_face = crop_face(img, face) + im_portrait = inference(net,im_face) + + # save the output + cv2.imwrite(out_dir+"/"+im_list[i].split('/')[-1][0:-4]+'.png',(im_portrait*255).astype(np.uint8)) + +if __name__ == '__main__': + main() diff --git a/u2net_portrait_test.py b/u2net_portrait_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7e103dd336868ff71fe4a114d7e4e3b437a80a59 --- /dev/null +++ b/u2net_portrait_test.py @@ -0,0 +1,117 @@ +import os +from skimage import io, transform +import torch +import torchvision +from torch.autograd import Variable +import torch.nn as nn +import torch.nn.functional as F +from torch.utils.data import Dataset, DataLoader +from torchvision import transforms#, utils +# import torch.optim as optim + +import numpy as np +from PIL import Image +import glob + +from data_loader import RescaleT +from data_loader import ToTensor +from data_loader import ToTensorLab +from data_loader import SalObjDataset + +from model import U2NET # full size version 173.6 MB +from model import U2NETP # small version u2net 4.7 MB + +# normalize the predicted SOD probability map +def normPRED(d): + ma = torch.max(d) + mi = torch.min(d) + + dn = (d-mi)/(ma-mi) + + return dn + +def save_output(image_name,pred,d_dir): + + predict = pred + predict = predict.squeeze() + predict_np = predict.cpu().data.numpy() + + im = Image.fromarray(predict_np*255).convert('RGB') + img_name = image_name.split(os.sep)[-1] + image = 
io.imread(image_name) + imo = im.resize((image.shape[1],image.shape[0]),resample=Image.BILINEAR) + + pb_np = np.array(imo) + + aaa = img_name.split(".") + bbb = aaa[0:-1] + imidx = bbb[0] + for i in range(1,len(bbb)): + imidx = imidx + "." + bbb[i] + + imo.save(d_dir+'/'+imidx+'.png') + +def main(): + + # --------- 1. get image path and name --------- + model_name='u2net_portrait'#u2netp + + + image_dir = './test_data/test_portrait_images/portrait_im' + prediction_dir = './test_data/test_portrait_images/portrait_results' + if(not os.path.exists(prediction_dir)): + os.mkdir(prediction_dir) + + model_dir = './saved_models/u2net_portrait/u2net_portrait.pth' + + img_name_list = glob.glob(image_dir+'/*') + print("Number of images: ", len(img_name_list)) + + # --------- 2. dataloader --------- + #1. dataloader + test_salobj_dataset = SalObjDataset(img_name_list = img_name_list, + lbl_name_list = [], + transform=transforms.Compose([RescaleT(512), + ToTensorLab(flag=0)]) + ) + test_salobj_dataloader = DataLoader(test_salobj_dataset, + batch_size=1, + shuffle=False, + num_workers=1) + + # --------- 3. model define --------- + + print("...load U2NET---173.6 MB") + net = U2NET(3,1) + + net.load_state_dict(torch.load(model_dir)) + if torch.cuda.is_available(): + net.cuda() + net.eval() + + # --------- 4. inference for each image --------- + for i_test, data_test in enumerate(test_salobj_dataloader): + + print("inferencing:",img_name_list[i_test].split(os.sep)[-1]) + + inputs_test = data_test['image'] + inputs_test = inputs_test.type(torch.FloatTensor) + + if torch.cuda.is_available(): + inputs_test = Variable(inputs_test.cuda()) + else: + inputs_test = Variable(inputs_test) + + d1,d2,d3,d4,d5,d6,d7= net(inputs_test) + + # normalization + pred = 1.0 - d1[:,0,:,:] + pred = normPRED(pred) + + # save results to test_results folder + save_output(img_name_list[i_test],pred,prediction_dir) + + del d1,d2,d3,d4,d5,d6,d7 + +if __name__ == "__main__": + main() diff --git a/u2net_run.py b/u2net_run.py new file mode 100644 index 0000000000000000000000000000000000000000..d66c955b35c05547516fd574f1ebdf7601899357 --- /dev/null +++ b/u2net_run.py @@ -0,0 +1,91 @@ +import os +from skimage import io, transform +import torch +import torchvision +from torch.autograd import Variable +import torch.nn as nn +import torch.nn.functional as F +from torch.utils.data import Dataset, DataLoader +from torchvision import transforms#, utils +from u2net_test import normPRED +# import torch.optim as optim + +import numpy as np +from PIL import Image +import glob +import warnings + +from data_loader import RescaleT +from data_loader import ToTensor +from data_loader import ToTensorLab +from data_loader import SalObjDataset + +warnings.filterwarnings("ignore") + +def save_images(image_name,pred,d_dir): + predict = pred + predict = predict.squeeze() + predict_np = predict.cpu().data.numpy() + + im = Image.fromarray(predict_np*255).convert('RGB') + img_name = image_name.split(os.sep)[-1] + image = io.imread(image_name) + imo = im.resize((image.shape[1],image.shape[0]),resample=Image.BICUBIC) + + pb_np = np.array(imo) + + aaa = img_name.split(".") + bbb = aaa[0:-1] + imidx = bbb[0] + for i in range(1,len(bbb)): + imidx = imidx + "." 
+ bbb[i] + print('Saving output at {}'.format(os.path.join(d_dir, imidx+'.png'))) + imo.save(os.path.join(d_dir, imidx+'.png')) + +def infer( + net, + image_dir = os.path.join(os.getcwd(), 'test_data', 'test_images'), + prediction_dir = os.path.join(os.getcwd(), 'test_data', 'u2net' + '_results') + ): + + + img_name_list = glob.glob(image_dir + os.sep + '*') + prediction_dir = prediction_dir + os.sep + + # --------- 2. dataloader --------- + #1. dataloader + test_salobj_dataset = SalObjDataset(img_name_list = img_name_list, + lbl_name_list = [], + transform=transforms.Compose([RescaleT(320), + ToTensorLab(flag=0)]) + ) + test_salobj_dataloader = DataLoader(test_salobj_dataset, + batch_size=1, + shuffle=False, + num_workers=1) + + # --------- 4. inference for each image --------- + for i_test, data_test in enumerate(test_salobj_dataloader): + + print("Generating mask for:",img_name_list[i_test].split(os.sep)[-1]) + + inputs_test = data_test['image'] + inputs_test = inputs_test.type(torch.FloatTensor) + + if torch.cuda.is_available(): + inputs_test = Variable(inputs_test.cuda()) + else: + inputs_test = Variable(inputs_test) + + d1,d2,d3,d4,d5,d6,d7= net(inputs_test) + + # normalization + pred = d1[:,0,:,:] + pred = normPRED(pred) + + # save results to test_results folder + if not os.path.exists(prediction_dir): + os.makedirs(prediction_dir, exist_ok=True) + save_images(img_name_list[i_test],pred,prediction_dir) + + del d1,d2,d3,d4,d5,d6,d7 diff --git a/u2net_test.py b/u2net_test.py new file mode 100644 index 0000000000000000000000000000000000000000..040d6f90d96bc544ad363f221936898a3ba1a708 --- /dev/null +++ b/u2net_test.py @@ -0,0 +1,122 @@ +import os +from skimage import io, transform +import torch +import torchvision +from torch.autograd import Variable +import torch.nn as nn +import torch.nn.functional as F +from torch.utils.data import Dataset, DataLoader +from torchvision import transforms#, utils +# import torch.optim as optim + +import numpy as np +from PIL import Image +import glob + +from data_loader import RescaleT +from data_loader import ToTensor +from data_loader import ToTensorLab +from data_loader import SalObjDataset + +from model import U2NET # full size version 173.6 MB +from model import U2NETP # small version u2net 4.7 MB + +# normalize the predicted SOD probability map +def normPRED(d): + ma = torch.max(d) + mi = torch.min(d) + + dn = (d-mi)/(ma-mi) + + return dn + +def save_output(image_name,pred,d_dir): + + predict = pred + predict = predict.squeeze() + predict_np = predict.cpu().data.numpy() + + im = Image.fromarray(predict_np*255).convert('RGB') + img_name = image_name.split(os.sep)[-1] + image = io.imread(image_name) + imo = im.resize((image.shape[1],image.shape[0]),resample=Image.BICUBIC) + + pb_np = np.array(imo) + + aaa = img_name.split(".") + bbb = aaa[0:-1] + imidx = bbb[0] + for i in range(1,len(bbb)): + imidx = imidx + "." + bbb[i] + + imo.save(d_dir+imidx+'.png') + +def main(): + + # --------- 1. get image path and name --------- + model_name='u2net'#u2netp + + + + image_dir = os.path.join(os.getcwd(), 'test_data', 'test_images') + prediction_dir = os.path.join(os.getcwd(), 'test_data', model_name + '_results' + os.sep) + model_dir = os.path.join(os.getcwd(), 'saved_models', model_name, model_name + '.pth') + + img_name_list = glob.glob(image_dir + os.sep + '*') + print(img_name_list) + + # --------- 2. dataloader --------- + #1. 
dataloader + test_salobj_dataset = SalObjDataset(img_name_list = img_name_list, + lbl_name_list = [], + transform=transforms.Compose([RescaleT(320), + ToTensorLab(flag=0)]) + ) + test_salobj_dataloader = DataLoader(test_salobj_dataset, + batch_size=1, + shuffle=False, + num_workers=1) + + # --------- 3. model define --------- + if(model_name=='u2net'): + print("...load U2NET---173.6 MB") + net = U2NET(3,1) + elif(model_name=='u2netp'): + print("...load U2NEP---4.7 MB") + net = U2NETP(3,1) + + if torch.cuda.is_available(): + net.load_state_dict(torch.load(model_dir)) + net.cuda() + else: + net.load_state_dict(torch.load(model_dir, map_location='cpu')) + net.eval() + + # --------- 4. inference for each image --------- + for i_test, data_test in enumerate(test_salobj_dataloader): + + print("inferencing:",img_name_list[i_test].split(os.sep)[-1]) + + inputs_test = data_test['image'] + inputs_test = inputs_test.type(torch.FloatTensor) + + if torch.cuda.is_available(): + inputs_test = Variable(inputs_test.cuda()) + else: + inputs_test = Variable(inputs_test) + + d1,d2,d3,d4,d5,d6,d7= net(inputs_test) + + # normalization + pred = d1[:,0,:,:] + pred = normPRED(pred) + + # save results to test_results folder + if not os.path.exists(prediction_dir): + os.makedirs(prediction_dir, exist_ok=True) + save_output(img_name_list[i_test],pred,prediction_dir) + + del d1,d2,d3,d4,d5,d6,d7 + +if __name__ == "__main__": + main() diff --git a/u2net_train.py b/u2net_train.py new file mode 100644 index 0000000000000000000000000000000000000000..8f19491feadcb2e37580ab9e4eeb82e1fdbfbbc9 --- /dev/null +++ b/u2net_train.py @@ -0,0 +1,164 @@ +import os +import torch +import torchvision +from torch.autograd import Variable +import torch.nn as nn +import torch.nn.functional as F + +from torch.utils.data import Dataset, DataLoader +from torchvision import transforms, utils +import torch.optim as optim +import torchvision.transforms as standard_transforms + +import numpy as np +import glob +import os + +from data_loader import Rescale +from data_loader import RescaleT +from data_loader import RandomCrop +from data_loader import ToTensor +from data_loader import ToTensorLab +from data_loader import SalObjDataset + +from model import U2NET +from model import U2NETP + +# ------- 1. define loss function -------- + +bce_loss = nn.BCELoss(size_average=True) + +def muti_bce_loss_fusion(d0, d1, d2, d3, d4, d5, d6, labels_v): + + loss0 = bce_loss(d0,labels_v) + loss1 = bce_loss(d1,labels_v) + loss2 = bce_loss(d2,labels_v) + loss3 = bce_loss(d3,labels_v) + loss4 = bce_loss(d4,labels_v) + loss5 = bce_loss(d5,labels_v) + loss6 = bce_loss(d6,labels_v) + + loss = loss0 + loss1 + loss2 + loss3 + loss4 + loss5 + loss6 + print("l0: %3f, l1: %3f, l2: %3f, l3: %3f, l4: %3f, l5: %3f, l6: %3f\n"%(loss0.data.item(),loss1.data.item(),loss2.data.item(),loss3.data.item(),loss4.data.item(),loss5.data.item(),loss6.data.item())) + + return loss0, loss + + +# ------- 2. 
set the directory of training dataset -------- + +model_name = 'u2net' #'u2netp' + +data_dir = os.path.join(os.getcwd(), 'train_data' + os.sep) +tra_image_dir = os.path.join('DUTS', 'DUTS-TR', 'DUTS-TR', 'im_aug' + os.sep) +tra_label_dir = os.path.join('DUTS', 'DUTS-TR', 'DUTS-TR', 'gt_aug' + os.sep) + +image_ext = '.jpg' +label_ext = '.png' + +model_dir = os.path.join(os.getcwd(), 'saved_models', model_name + os.sep) + +epoch_num = 100000 +batch_size_train = 12 +batch_size_val = 1 +train_num = 0 +val_num = 0 + +tra_img_name_list = glob.glob(data_dir + tra_image_dir + '*' + image_ext) + +tra_lbl_name_list = [] +for img_path in tra_img_name_list: + img_name = img_path.split(os.sep)[-1] + + aaa = img_name.split(".") + bbb = aaa[0:-1] + imidx = bbb[0] + for i in range(1,len(bbb)): + imidx = imidx + "." + bbb[i] + + tra_lbl_name_list.append(data_dir + tra_label_dir + imidx + label_ext) + +print("---") +print("train images: ", len(tra_img_name_list)) +print("train labels: ", len(tra_lbl_name_list)) +print("---") + +train_num = len(tra_img_name_list) + +salobj_dataset = SalObjDataset( + img_name_list=tra_img_name_list, + lbl_name_list=tra_lbl_name_list, + transform=transforms.Compose([ + RescaleT(320), + RandomCrop(288), + ToTensorLab(flag=0)])) +salobj_dataloader = DataLoader(salobj_dataset, batch_size=batch_size_train, shuffle=True, num_workers=1) + +# ------- 3. define model -------- +# define the net +if(model_name=='u2net'): + net = U2NET(3, 1) +elif(model_name=='u2netp'): + net = U2NETP(3,1) + +if torch.cuda.is_available(): + net.cuda() + +# ------- 4. define optimizer -------- +print("---define optimizer...") +optimizer = optim.Adam(net.parameters(), lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0) + +# ------- 5. training process -------- +print("---start training...") +ite_num = 0 +running_loss = 0.0 +running_tar_loss = 0.0 +ite_num4val = 0 +save_frq = 2000 # save the model every 2000 iterations + +for epoch in range(0, epoch_num): + net.train() + + for i, data in enumerate(salobj_dataloader): + ite_num = ite_num + 1 + ite_num4val = ite_num4val + 1 + + inputs, labels = data['image'], data['label'] + + inputs = inputs.type(torch.FloatTensor) + labels = labels.type(torch.FloatTensor) + + # wrap them in Variable + if torch.cuda.is_available(): + inputs_v, labels_v = Variable(inputs.cuda(), requires_grad=False), Variable(labels.cuda(), + requires_grad=False) + else: + inputs_v, labels_v = Variable(inputs, requires_grad=False), Variable(labels, requires_grad=False) + + # y zero the parameter gradients + optimizer.zero_grad() + + # forward + backward + optimize + d0, d1, d2, d3, d4, d5, d6 = net(inputs_v) + loss2, loss = muti_bce_loss_fusion(d0, d1, d2, d3, d4, d5, d6, labels_v) + + loss.backward() + optimizer.step() + + # # print statistics + running_loss += loss.data.item() + running_tar_loss += loss2.data.item() + + # del temporary outputs and loss + del d0, d1, d2, d3, d4, d5, d6, loss2, loss + + print("[epoch: %3d/%3d, batch: %5d/%5d, ite: %d] train loss: %3f, tar: %3f " % ( + epoch + 1, epoch_num, (i + 1) * batch_size_train, train_num, ite_num, running_loss / ite_num4val, running_tar_loss / ite_num4val)) + + if ite_num % save_frq == 0: + + torch.save(net.state_dict(), model_dir + model_name+"_bce_itr_%d_train_%3f_tar_%3f.pth" % (ite_num, running_loss / ite_num4val, running_tar_loss / ite_num4val)) + running_loss = 0.0 + running_tar_loss = 0.0 + net.train() # resume train + ite_num4val = 0 + diff --git a/util/image_pool.py b/util/image_pool.py new file mode 100644 index 
0000000000000000000000000000000000000000..deeb7cca923330113afa0f460723b9d86e321c08 --- /dev/null +++ b/util/image_pool.py @@ -0,0 +1,33 @@ +import random +import torch +from torch.autograd import Variable + + +class ImagePool(): + def __init__(self, pool_size): + self.pool_size = pool_size + if self.pool_size > 0: + self.num_imgs = 0 + self.images = [] + + def query(self, images): + if self.pool_size == 0: + return images + return_images = [] + for image in images.data: + image = torch.unsqueeze(image, 0) + if self.num_imgs < self.pool_size: + self.num_imgs = self.num_imgs + 1 + self.images.append(image) + return_images.append(image) + else: + p = random.uniform(0, 1) + if p > 0.5: + random_id = random.randint(0, self.pool_size-1) + tmp = self.images[random_id].clone() + self.images[random_id] = image + return_images.append(tmp) + else: + return_images.append(image) + return_images = Variable(torch.cat(return_images, 0)) + return return_images diff --git a/util/util.py b/util/util.py new file mode 100644 index 0000000000000000000000000000000000000000..550560aac8dc82fe4f896fd0c37e36fab3e15dd2 --- /dev/null +++ b/util/util.py @@ -0,0 +1,145 @@ +from __future__ import print_function +import os +from PIL import Image +import numpy as np +import torch + +print('?') + +# Converts a Tensor into a Numpy array +# |imtype|: the desired type of the converted numpy array + + +def tensor2im(image_tensor, imtype=np.uint8, normalize=True): + if isinstance(image_tensor, list): + image_numpy = [] + for i in range(len(image_tensor)): + image_numpy.append(tensor2im(image_tensor[i], imtype, normalize)) + return image_numpy + image_numpy = image_tensor.cpu().float().numpy() + # if normalize: + # image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + # else: + # image_numpy = np.transpose(image_numpy, (1, 2, 0)) * 255.0 + image_numpy = (image_numpy + 1) / 2.0 + image_numpy = np.clip(image_numpy, 0, 1) + if image_numpy.shape[2] == 1 or image_numpy.shape[2] > 3: + image_numpy = image_numpy[:, :, 0] + + return image_numpy + +# Converts a one-hot tensor into a colorful label map + + +def tensor2label(label_tensor, n_label, imtype=np.uint8): + if n_label == 0: + return tensor2im(label_tensor, imtype) + label_tensor = label_tensor.cpu().float() + if label_tensor.size()[0] > 1: + label_tensor = label_tensor.max(0, keepdim=True)[1] + label_tensor = Colorize(n_label)(label_tensor) + #label_numpy = np.transpose(label_tensor.numpy(), (1, 2, 0)) + label_numpy = label_tensor.numpy() + label_numpy = label_numpy / 255.0 + + return label_numpy + + +def save_image(image_numpy, image_path, grayscale=False): + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + + +def save_tensor_as_image(image_tensor, image_path, grayscale=False): + image_numpy = tensor_to_image(image_tensor, grayscale) + save_image(image_numpy, image_path, grayscale) + + +def tensor_to_image(img_tensor, grayscale=False): + if grayscale: + tensor = img_tensor.cpu().clamp(0, 255) + else: + tensor = (img_tensor.clone() + 1) * 0.5 * 255 + tensor = tensor.cpu().clamp(0, 255) + + try: + array = tensor.numpy().astype('uint8') + except: + array = tensor.detach().numpy().astype('uint8') + + if array.shape[0] == 1: + array = array.squeeze(0) + elif array.shape[0] == 3: + array = array.swapaxes(0, 1).swapaxes(1, 2) + + return array + + +def mkdirs(paths): + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + + +def mkdir(path): + if not os.path.exists(path): + 
os.makedirs(path) + +############################################################################### +# Code from +# https://github.com/ycszen/pytorch-seg/blob/master/transform.py +# Modified so it complies with the Citscape label map colors +############################################################################### + + +def uint82bin(n, count=8): + """returns the binary of integer n, count refers to amount of bits""" + return ''.join([str((n >> y) & 1) for y in range(count-1, -1, -1)]) + + +def labelcolormap(N): + if N == 35: # cityscape + cmap = np.array([(0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), (111, 74, 0), (81, 0, 81), + (128, 64, 128), (244, 35, 232), (250, 170, 160), (230, + 150, 140), (70, 70, 70), (102, 102, 156), (190, 153, 153), + (180, 165, 180), (150, 100, 100), (150, 120, 90), (153, + 153, 153), (153, 153, 153), (250, 170, 30), (220, 220, 0), + (107, 142, 35), (152, 251, 152), (70, 130, 180), (220, + 20, 60), (255, 0, 0), (0, 0, 142), (0, 0, 70), + (0, 60, 100), (0, 0, 90), (0, 0, 110), (0, 80, 100), (0, 0, 230), (119, 11, 32), (0, 0, 142)], + dtype=np.uint8) + else: + cmap = np.zeros((N, 3), dtype=np.uint8) + for i in range(N): + r, g, b = 0, 0, 0 + id = i + for j in range(7): + str_id = uint82bin(id) + r = r ^ (np.uint8(str_id[-1]) << (7-j)) + g = g ^ (np.uint8(str_id[-2]) << (7-j)) + b = b ^ (np.uint8(str_id[-3]) << (7-j)) + id = id >> 3 + cmap[i, 0] = r + cmap[i, 1] = g + cmap[i, 2] = b + return cmap + + +class Colorize(object): + def __init__(self, n=35): + self.cmap = labelcolormap(n) + self.cmap = torch.from_numpy(self.cmap[:n]) + + def __call__(self, gray_image): + size = gray_image.size() + color_image = torch.ByteTensor(3, size[1], size[2]).fill_(0) + + for label in range(0, len(self.cmap)): + mask = (label == gray_image[0]).cpu() + color_image[0][mask] = self.cmap[label][0] + color_image[1][mask] = self.cmap[label][1] + color_image[2][mask] = self.cmap[label][2] + + return color_image
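The Colorize and labelcolormap helpers at the end of util/util.py turn an integer parsing map into an RGB visualisation; tensor2label builds on them. A minimal usage sketch, assuming the script is run from the repo root so that util/util.py imports as util.util, and using a random label map purely for illustration:

import torch
from util.util import Colorize, tensor2label

NC = 14  # parsing label count used by test.py earlier in this patch

# Fake 1 x H x W integer label map with class ids in [0, NC).
gray = torch.randint(0, NC, (1, 256, 192)).byte()

color = Colorize(NC)(gray)   # 3 x H x W ByteTensor, one color per class id
print(color.shape)

# tensor2label also accepts an NC-channel score map and takes the argmax first;
# a single-channel map like this one goes straight through Colorize and is scaled to [0, 1].
label_vis = tensor2label(gray.float(), NC)
print(label_vis.shape)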